This notebook is similar to the DNN notebook; however, it also reports results for three volume levels (low, medium, high) in the 10-fold cross-validation sections.
import joblib
import plotly
import optuna
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.utils.data import random_split
import torchvision
import torchvision.transforms as transforms
from sklearn.decomposition import PCA
from sklearn.model_selection import train_test_split
from sklearn.metrics import r2_score, mean_squared_error
from sklearn.preprocessing import StandardScaler, MinMaxScaler
from sklearn.pipeline import Pipeline
from sklearn.model_selection import KFold, StratifiedKFold
from keras.layers import Dense, Dropout, BatchNormalization
from keras.metrics import RootMeanSquaredError
from keras.models import Sequential
from keras.optimizers import Adam
from pprint import pprint
from patsy import dmatrices
import pandas as pd
import numpy as np
import os
import time
import math
import pickle
import random
from ann_visualizer.visualize import ann_viz
from matplotlib import pyplot as plt
import matplotlib.style as style
style.use('fivethirtyeight')
%matplotlib inline
def MAPE_pytorch(y_true, pred):
    """Mean absolute percentage error (in %) between two torch tensors."""
    relative_errors = torch.abs((y_true - pred) / y_true)
    return 100 * torch.mean(relative_errors)
def RMSE_pytorch(y_true, pred):
    """Root mean squared error between two torch tensors."""
    mse = F.mse_loss(pred, y_true)
    return torch.sqrt(mse)
def R2_pytorch(y_true, pred):
    """Coefficient of determination (R^2) between two torch tensors."""
    residual_ss = torch.sum((y_true - pred) ** 2)
    total_ss = torch.sum((y_true - torch.mean(y_true)) ** 2)
    return 1 - residual_ss / total_ss
def rmse(targets, predictions, **kwargs):
    """Root mean squared error of `predictions` against `targets`.

    Inputs are converted to numpy arrays first, so lists/Series are accepted.
    Extra keyword arguments are accepted for scorer-API compatibility but
    are intentionally ignored. Computed directly with numpy (equivalent to
    sqrt of sklearn's uniform-weight mean_squared_error) so the function is
    self-contained.
    """
    targets, predictions = np.asarray(targets), np.asarray(predictions)
    return np.sqrt(np.mean((targets - predictions) ** 2))
def evaluate(model, test_features, test_labels):
    """Print and return the accuracy (100 - MAPE) of a fitted model's predictions."""
    preds = model.predict(test_features)
    abs_errors = abs(preds - test_labels)
    pct_error = 100 * np.mean(abs_errors / test_labels)
    accuracy = 100 - pct_error
    print('Average Error: {:0.4f} degrees.'.format(np.mean(abs_errors)))
    print('Accuracy = {:0.2f}%.'.format(accuracy))
    return accuracy
def mean_absolute_percentage_error(y_true, y_pred, **kwargs):
    """Mean absolute percentage error (in %) of y_pred against y_true."""
    # src: https://stats.stackexchange.com/questions/58391/mean-absolute-percentage-error-mape-in-scikit-learn
    y_true = np.array(y_true)
    y_pred = np.array(y_pred)
    return 100 * np.mean(np.abs((y_true - y_pred) / y_true))
# Load daily data for clusters 2 and 3 for 2017-2019 (six CSVs total).
d191 = pd.read_csv("data/cluster2_daily_2019.csv")
d192 = pd.read_csv("data/cluster3_daily_2019.csv")
d181 = pd.read_csv("data/cluster2_daily_2018.csv")
d182 = pd.read_csv("data/cluster3_daily_2018.csv")
d171 = pd.read_csv("data/cluster2_daily_2017.csv")
d172 = pd.read_csv("data/cluster3_daily_2017.csv")
# BUGFIX: the second label/value pair previously printed d191.shape twice
# instead of d192.shape.
print(f"d191.shape: {d191.shape}, d192.shape: {d192.shape}\nd181.shape: {d181.shape}, d182.shape: {d182.shape}\nd171.shape: {d171.shape}, d172.shape: {d172.shape}")
d191.shape: (1483, 57), d192.shape: (974, 57) d181.shape: (1299, 57), d182.shape: (579, 57) d171.shape: (1332, 57), d172.shape: (1079, 57)
# Combine all three years (2017-2019) of both cluster datasets row-wise.
all_data = pd.concat([d191,d192,d181,d182,d171,d172,], axis=0)
# Drop identifier, date, and index-artifact columns (plus Total_ADTVolume and
# fit.cluster) that are not used as model features; DailyVolume stays as target.
data_dropped = all_data.drop(["Date","Unnamed: 0","Vendor_Site_Id","X","i.X","i.X.1","X.1","factor","STATION", "newdate","Total_ADTVolume","year","month","day","fit.cluster"], axis=1)
# Notebook cell output below shows the resulting shape: (6746, 46).
data_dropped.shape
(6746, 46)
# Replace all NaN values with zero (no imputation beyond this).
data_filled = data_dropped.fillna(0)
# Summary statistics of the filled table (notebook cell output follows).
data_filled.describe()
| OD_line | Destination | Origin | Bike.lane..ft. | Buffered.bike.lane..ft. | Enhanced.shared.roadway..ft. | Protected.bike.lane..ft. | Neighbor.green.way..ft. | Off.street.path.trail..ft. | Primary.Arterial..ft. | ... | pct_of_African.American.population | pct_of_White.population | Meadian.Household.Income.000.. | Education | Avg.Temp | Avg.Humidity | PreciP | Weekend | DailyVolume | strava_volume | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| count | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 | ... | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 | 6746.000000 |
| mean | 24.565965 | 4.370738 | 4.370145 | 1174.402462 | 633.900902 | 5.865052 | 620.725648 | 307.992118 | 9626.805171 | 1665.836941 | ... | 5.479986 | 79.249231 | 65.773941 | 70.687779 | 55.419609 | 73.843940 | 0.031055 | 0.288615 | 1066.441891 | 67.318411 |
| std | 42.551007 | 7.792964 | 7.318313 | 1534.973316 | 791.001101 | 66.246042 | 854.051183 | 756.943964 | 9714.966405 | 1999.010159 | ... | 3.307419 | 3.609301 | 110.540980 | 8.024137 | 11.520078 | 13.766100 | 0.120453 | 0.453152 | 704.181434 | 53.252605 |
| min | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | ... | 0.000000 | 61.926000 | 9.720000 | 43.990087 | 21.200000 | 16.000000 | 0.000000 | 0.000000 | 10.000000 | 0.000000 |
| 25% | 3.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 2132.568621 | 0.000000 | ... | 2.653333 | 76.234000 | 44.972000 | 66.723930 | 46.296296 | 63.777778 | 0.000000 | 0.000000 | 483.250000 | 25.000000 |
| 50% | 11.000000 | 0.000000 | 1.000000 | 0.000000 | 0.000000 | 0.000000 | 327.275382 | 0.000000 | 5099.161603 | 1297.549800 | ... | 6.310000 | 78.577000 | 45.337000 | 68.890280 | 54.391176 | 74.000000 | 0.000000 | 0.000000 | 954.000000 | 55.000000 |
| 75% | 28.000000 | 6.000000 | 6.000000 | 1926.701848 | 950.154336 | 0.000000 | 675.665306 | 0.000000 | 14759.064020 | 2132.796300 | ... | 8.360000 | 82.245000 | 61.263330 | 74.633750 | 64.833333 | 85.380952 | 0.006304 | 1.000000 | 1508.750000 | 95.000000 |
| max | 916.000000 | 82.000000 | 80.000000 | 4893.294830 | 3463.680000 | 1219.680000 | 2813.512562 | 13273.920000 | 42392.719290 | 9149.269442 | ... | 26.474000 | 96.028000 | 2902.380000 | 85.472000 | 89.000000 | 100.000000 | 2.290000 | 1.000000 | 3136.000000 | 315.000000 |
8 rows × 46 columns
# split data into X dataframe and Y dataframe:
# Features = everything except the target column; target = DailyVolume.
X_data = data_filled.drop("DailyVolume", axis=1)
Y_data = data_filled["DailyVolume"]
print(f"X_data.shape: {X_data.shape} Y_data.shape: {Y_data.shape}")
X_data.shape: (6746, 45) Y_data.shape: (6746,)
# Convert the pandas objects to plain numpy arrays for sklearn/torch use.
X = np.asarray(X_data)
Y = np.asarray(Y_data)
print(f"X.shape: {X.shape} Y.shape: {Y.shape}")
X.shape: (6746, 45) Y.shape: (6746,)
# 80/20 train/test split with a fixed seed for reproducibility.
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.2, random_state=42)
# convert np arrays to tensor, with float.
X_train = torch.from_numpy(X_train).float()
X_test = torch.from_numpy(X_test).float()
# Reshape targets to column vectors. Using (-1, 1) infers the row count
# instead of hard-coding the split sizes (previously (5396, 1)/(1350, 1)),
# so this cell keeps working if the dataset size changes.
Y_train = torch.reshape(torch.from_numpy(Y_train).float(), (-1, 1))
Y_test = torch.reshape(torch.from_numpy(Y_test).float(), (-1, 1))
print(f"X_train.shape={X_train.shape} X_test.shape={X_test.shape}\nY_train.shape={Y_train.shape} Y_test.shape={Y_test.shape}")
X_train.shape=torch.Size([5396, 45]) X_test.shape=torch.Size([1350, 45]) Y_train.shape=torch.Size([5396, 1]) Y_test.shape=torch.Size([1350, 1])
# Standardize then project onto the first 25 principal components.
pipe = Pipeline([("scaler", StandardScaler()),
                 ("pca", PCA(n_components=25))])
PCA_X = pipe.fit_transform(X)
# Same 80/20 split (same random_state as the non-PCA split above).
PCA_X_train, PCA_X_test, PCA_Y_train, PCA_Y_test = train_test_split(PCA_X, Y, test_size=0.2, random_state=42)
# convert np arrays to tensor, with float.
PCA_X_train = torch.from_numpy(PCA_X_train).float()
PCA_X_test = torch.from_numpy(PCA_X_test).float()
# Reshape targets to column vectors; (-1, 1) infers the row count instead of
# hard-coding the split sizes (previously (5396, 1)/(1350, 1)).
PCA_Y_train = torch.reshape(torch.from_numpy(PCA_Y_train).float(), (-1, 1))
PCA_Y_test = torch.reshape(torch.from_numpy(PCA_Y_test).float(), (-1, 1))
print(f"PCA_X_train.shape={PCA_X_train.shape} PCA_X_test.shape={PCA_X_test.shape}\nPCA_Y_train.shape={PCA_Y_train.shape} PCA_Y_test.shape={PCA_Y_test.shape}")
PCA_X_train.shape=torch.Size([5396, 25]) PCA_X_test.shape=torch.Size([1350, 25]) PCA_Y_train.shape=torch.Size([5396, 1]) PCA_Y_test.shape=torch.Size([1350, 1])
# Optuna study identifier: 45 input variables, 100 neurons in the first layer.
study_name = "dnn_45_var_100_nrn_1st_layer"
# Seed every RNG in play (torch CPU, torch CUDA, numpy, stdlib random) so
# the hyperparameter search is reproducible.
torch.manual_seed(42)
torch.cuda.manual_seed(42)
np.random.seed(42)
random.seed(42)
# Number of training epochs per Optuna trial.
n_epochs = 100
# src: https://stackoverflow.com/questions/45113245/how-to-get-mini-batches-in-pytorch-in-a-clean-and-efficient-way
def objective_fn(trial):
    """Optuna objective: train a fixed 45-100-50-25-12-1 DNN, return test MAE.

    Tuned hyperparameters:
      - lr: Adam learning rate, log-uniform in [1e-3, 1e-1]
      - batch_size: one of {16, 32, 64, 128}
      - HL{0..3}_ac_fn: per hidden layer, 'relu' inserts a ReLU after the
        Linear; 'linear' inserts nothing (identity).

    Relies on module-level X_train/Y_train/X_test/Y_test tensors and n_epochs.
    Raises optuna.TrialPruned when the pruner flags the trial.
    """
    # set up GPU if available.
    device = "cuda:0" if torch.cuda.is_available() else "cpu"
    # hyperparameters suggested by optuna:
    lr = trial.suggest_float("lr", 1e-3, 1e-1, log=True)
    batch_size = trial.suggest_categorical("batch_size", [16, 32, 64, 128])
    # Build the fixed-architecture network; each hidden Linear optionally
    # gets a ReLU depending on the suggested activation.
    layers = []
    hidden_shapes = [(45, 100), (100, 50), (50, 25), (25, 12)]
    for idx, (n_in, n_out) in enumerate(hidden_shapes):
        layers.append(torch.nn.Linear(n_in, n_out))
        activation = trial.suggest_categorical(f"HL{idx}_ac_fn", ["relu", "linear"])
        if activation == 'relu':
            layers.append(torch.nn.ReLU())
    # Output layer: single regression value; no activation ('linear').
    layers.append(torch.nn.Linear(12, 1))
    dnn_model = torch.nn.Sequential(*layers).to(device)
    # use MAE as loss function (called L1Loss).
    loss_fn = nn.L1Loss()
    optimizer = optim.Adam(dnn_model.parameters(), lr=lr)
    test_loss_value = float("inf")
    for epoch in range(n_epochs):
        # Train for one epoch over shuffled mini-batches.
        dnn_model.train()
        # Fresh random permutation each epoch -> reshuffled batches.
        permutation = torch.randperm(X_train.size()[0])
        for i in range(0, X_train.size()[0], batch_size):
            indices = permutation[i:i + batch_size]
            X_train_batch, Y_train_batch = X_train[indices], Y_train[indices]
            train_prediction = dnn_model(X_train_batch.to(device))
            train_loss = loss_fn(train_prediction, Y_train_batch.to(device))
            optimizer.zero_grad()
            # backpropagation
            train_loss.backward()
            optimizer.step()
        # Evaluate on the held-out test set. no_grad() avoids building an
        # autograd graph for the evaluation pass (was missing before).
        dnn_model.eval()
        with torch.no_grad():
            test_prediction = dnn_model(X_test.to(device))
            # .item() hands optuna a plain float rather than a 0-d tensor.
            test_loss_value = loss_fn(test_prediction, Y_test.to(device)).item()
        # Report to the pruner; abandon clearly-hopeless trials early.
        trial.report(test_loss_value, step=epoch)
        if trial.should_prune():
            raise optuna.TrialPruned()
    # Return the quantity being minimized (final test-set MAE).
    return test_loss_value
%%time
# TPE sampler with a fixed seed for a reproducible search; the objective
# (test-set MAE) is minimized.
study = optuna.create_study(sampler=optuna.samplers.TPESampler(seed=42),study_name=study_name, direction='minimize')
# Up to 5000 trials; the pruner stops unpromising trials early (see log below).
study.optimize(objective_fn, n_trials=5000)
[I 2021-05-10 06:15:40,600] A new study created in memory with name: dnn_45_var_100_nrn_1st_layer
[I 2021-05-10 06:16:33,487] Trial 0 finished with value: 381.4081115722656 and parameters: {'lr': 0.005611516415334507, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 0 with value: 381.4081115722656.
[I 2021-05-10 06:16:39,933] Trial 1 finished with value: 388.0707702636719 and parameters: {'lr': 0.0026587543983272706, 'batch_size': 128, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 0 with value: 381.4081115722656.
[I 2021-05-10 06:16:46,369] Trial 2 finished with value: 394.2862548828125 and parameters: {'lr': 0.002508115686045232, 'batch_size': 128, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 0 with value: 381.4081115722656.
[I 2021-05-10 06:16:52,835] Trial 3 finished with value: 378.56146240234375 and parameters: {'lr': 0.007591104805282696, 'batch_size': 128, 'HL0_ac_fn': 'linear', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 3 with value: 378.56146240234375.
[I 2021-05-10 06:17:05,423] Trial 4 finished with value: 643.1636962890625 and parameters: {'lr': 0.0756829206016762, 'batch_size': 64, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'relu'}. Best is trial 3 with value: 378.56146240234375.
[I 2021-05-10 06:17:05,497] Trial 5 pruned.
[I 2021-05-10 06:17:05,748] Trial 6 pruned.
[I 2021-05-10 06:17:05,884] Trial 7 pruned.
[I 2021-05-10 06:17:06,021] Trial 8 pruned.
[I 2021-05-10 06:17:06,094] Trial 9 pruned.
[I 2021-05-10 06:17:07,135] Trial 10 pruned.
[I 2021-05-10 06:17:12,246] Trial 11 pruned.
[I 2021-05-10 06:17:13,324] Trial 12 pruned.
[I 2021-05-10 06:17:22,980] Trial 13 pruned.
[I 2021-05-10 06:17:23,493] Trial 14 pruned.
[I 2021-05-10 06:18:14,824] Trial 15 finished with value: 385.4799499511719 and parameters: {'lr': 0.00402805701681747, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 3 with value: 378.56146240234375.
[I 2021-05-10 06:18:14,896] Trial 16 pruned.
[I 2021-05-10 06:18:15,399] Trial 17 pruned.
[I 2021-05-10 06:18:15,470] Trial 18 pruned.
[I 2021-05-10 06:18:16,472] Trial 19 pruned.
[I 2021-05-10 06:18:17,720] Trial 20 pruned.
[I 2021-05-10 06:19:09,239] Trial 21 finished with value: 365.8607482910156 and parameters: {'lr': 0.0042087201561840475, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 21 with value: 365.8607482910156.
[I 2021-05-10 06:19:12,332] Trial 22 pruned.
[I 2021-05-10 06:19:13,873] Trial 23 pruned.
[I 2021-05-10 06:19:16,954] Trial 24 pruned.
[I 2021-05-10 06:19:20,050] Trial 25 pruned.
[I 2021-05-10 06:19:25,614] Trial 26 pruned.
[I 2021-05-10 06:20:16,331] Trial 27 finished with value: 379.90203857421875 and parameters: {'lr': 0.003226751452383439, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 21 with value: 365.8607482910156.
[I 2021-05-10 06:20:16,729] Trial 28 pruned.
[I 2021-05-10 06:20:21,719] Trial 29 pruned.
[I 2021-05-10 06:20:22,211] Trial 30 pruned.
[I 2021-05-10 06:21:13,058] Trial 31 finished with value: 375.6762390136719 and parameters: {'lr': 0.006125412341652612, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 21 with value: 365.8607482910156.
[I 2021-05-10 06:21:13,580] Trial 32 pruned.
[I 2021-05-10 06:21:15,633] Trial 33 pruned.
[I 2021-05-10 06:21:16,139] Trial 34 pruned.
[I 2021-05-10 06:21:16,214] Trial 35 pruned.
[I 2021-05-10 06:21:28,050] Trial 36 pruned.
[I 2021-05-10 06:21:31,599] Trial 37 pruned.
[I 2021-05-10 06:21:31,669] Trial 38 pruned.
[I 2021-05-10 06:21:36,692] Trial 39 pruned.
[I 2021-05-10 06:21:39,222] Trial 40 pruned.
[I 2021-05-10 06:21:42,869] Trial 41 pruned.
[I 2021-05-10 06:21:48,070] Trial 42 pruned.
[I 2021-05-10 06:21:48,586] Trial 43 pruned.
[I 2021-05-10 06:21:48,839] Trial 44 pruned.
[I 2021-05-10 06:21:48,915] Trial 45 pruned.
[I 2021-05-10 06:21:49,051] Trial 46 pruned.
[I 2021-05-10 06:21:49,573] Trial 47 pruned.
[I 2021-05-10 06:21:54,712] Trial 48 pruned.
[I 2021-05-10 06:21:54,787] Trial 49 pruned.
[I 2021-05-10 06:21:55,282] Trial 50 pruned.
[I 2021-05-10 06:22:46,991] Trial 51 finished with value: 383.86773681640625 and parameters: {'lr': 0.003949029506531702, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 21 with value: 365.8607482910156.
[I 2021-05-10 06:22:47,518] Trial 52 pruned.
[I 2021-05-10 06:22:48,040] Trial 53 pruned.
[I 2021-05-10 06:22:48,571] Trial 54 pruned.
[I 2021-05-10 06:23:40,015] Trial 55 finished with value: 374.426513671875 and parameters: {'lr': 0.0012592250529807547, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 21 with value: 365.8607482910156.
[I 2021-05-10 06:23:41,525] Trial 56 pruned.
[I 2021-05-10 06:23:52,923] Trial 57 pruned.
[I 2021-05-10 06:23:53,001] Trial 58 pruned.
[I 2021-05-10 06:23:53,498] Trial 59 pruned.
[I 2021-05-10 06:23:54,014] Trial 60 pruned.
[I 2021-05-10 06:23:55,552] Trial 61 pruned.
[I 2021-05-10 06:23:57,099] Trial 62 pruned.
[I 2021-05-10 06:23:57,615] Trial 63 pruned.
[I 2021-05-10 06:23:58,133] Trial 64 pruned.
[I 2021-05-10 06:24:49,727] Trial 65 finished with value: 378.156494140625 and parameters: {'lr': 0.00400821677441094, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 21 with value: 365.8607482910156.
[I 2021-05-10 06:25:41,641] Trial 66 finished with value: 376.3600769042969 and parameters: {'lr': 0.005793487018667471, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 21 with value: 365.8607482910156.
[I 2021-05-10 06:25:42,182] Trial 67 pruned.
[I 2021-05-10 06:25:42,320] Trial 68 pruned.
[I 2021-05-10 06:25:42,464] Trial 69 pruned.
[I 2021-05-10 06:25:42,972] Trial 70 pruned.
[I 2021-05-10 06:25:43,490] Trial 71 pruned.
[I 2021-05-10 06:26:35,452] Trial 72 finished with value: 362.7355651855469 and parameters: {'lr': 0.006250169080446751, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:26:35,979] Trial 73 pruned.
[I 2021-05-10 06:26:37,539] Trial 74 pruned.
[I 2021-05-10 06:26:38,053] Trial 75 pruned.
[I 2021-05-10 06:26:39,614] Trial 76 pruned.
[I 2021-05-10 06:26:41,186] Trial 77 pruned.
[I 2021-05-10 06:27:32,043] Trial 78 finished with value: 387.2450866699219 and parameters: {'lr': 0.0048230565044267305, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:27:32,586] Trial 79 pruned.
[I 2021-05-10 06:27:32,661] Trial 80 pruned.
[I 2021-05-10 06:27:33,189] Trial 81 pruned.
[I 2021-05-10 06:27:33,725] Trial 82 pruned.
[I 2021-05-10 06:27:34,241] Trial 83 pruned.
[I 2021-05-10 06:27:57,542] Trial 84 pruned.
[I 2021-05-10 06:27:58,061] Trial 85 pruned.
[I 2021-05-10 06:28:01,201] Trial 86 pruned.
[I 2021-05-10 06:28:01,335] Trial 87 pruned.
[I 2021-05-10 06:28:01,410] Trial 88 pruned.
[I 2021-05-10 06:28:02,437] Trial 89 pruned.
[I 2021-05-10 06:28:02,951] Trial 90 pruned.
[I 2021-05-10 06:28:03,474] Trial 91 pruned.
[I 2021-05-10 06:28:05,041] Trial 92 pruned.
[I 2021-05-10 06:28:05,561] Trial 93 pruned.
[I 2021-05-10 06:28:06,079] Trial 94 pruned.
[I 2021-05-10 06:28:06,611] Trial 95 pruned.
[I 2021-05-10 06:28:07,127] Trial 96 pruned.
[I 2021-05-10 06:28:07,648] Trial 97 pruned.
[I 2021-05-10 06:28:07,721] Trial 98 pruned.
[I 2021-05-10 06:28:08,238] Trial 99 pruned.
[I 2021-05-10 06:28:08,527] Trial 100 pruned.
[I 2021-05-10 06:28:09,667] Trial 101 pruned.
[I 2021-05-10 06:28:10,194] Trial 102 pruned.
[I 2021-05-10 06:28:10,716] Trial 103 pruned.
[I 2021-05-10 06:28:11,260] Trial 104 pruned.
[I 2021-05-10 06:28:11,798] Trial 105 pruned.
[I 2021-05-10 06:28:12,920] Trial 106 pruned.
[I 2021-05-10 06:28:13,481] Trial 107 pruned.
[I 2021-05-10 06:28:14,017] Trial 108 pruned.
[I 2021-05-10 06:28:14,090] Trial 109 pruned.
[I 2021-05-10 06:29:06,501] Trial 110 finished with value: 747.313720703125 and parameters: {'lr': 0.005851221516887531, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:29:07,011] Trial 111 pruned.
[I 2021-05-10 06:29:07,519] Trial 112 pruned.
[I 2021-05-10 06:29:09,023] Trial 113 pruned.
[I 2021-05-10 06:29:59,921] Trial 114 finished with value: 376.53839111328125 and parameters: {'lr': 0.003314756252448273, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:30:00,933] Trial 115 pruned.
[I 2021-05-10 06:30:01,069] Trial 116 pruned.
[I 2021-05-10 06:30:01,590] Trial 117 pruned.
[I 2021-05-10 06:30:05,640] Trial 118 pruned.
[I 2021-05-10 06:30:06,161] Trial 119 pruned.
[I 2021-05-10 06:30:06,667] Trial 120 pruned.
[I 2021-05-10 06:30:07,178] Trial 121 pruned.
[I 2021-05-10 06:30:07,684] Trial 122 pruned.
[I 2021-05-10 06:30:08,193] Trial 123 pruned.
[I 2021-05-10 06:30:08,702] Trial 124 pruned.
[I 2021-05-10 06:30:09,211] Trial 125 pruned.
[I 2021-05-10 06:30:09,286] Trial 126 pruned.
[I 2021-05-10 06:30:09,788] Trial 127 pruned.
[I 2021-05-10 06:30:10,308] Trial 128 pruned.
[I 2021-05-10 06:30:35,836] Trial 129 finished with value: 385.39105224609375 and parameters: {'lr': 0.006052185408541417, 'batch_size': 32, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:30:36,105] Trial 130 pruned.
[I 2021-05-10 06:30:36,876] Trial 131 pruned.
[I 2021-05-10 06:31:02,295] Trial 132 finished with value: 406.4941711425781 and parameters: {'lr': 0.007670552141935292, 'batch_size': 32, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:31:52,994] Trial 133 finished with value: 371.8350524902344 and parameters: {'lr': 0.004171056415700489, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:31:53,255] Trial 134 pruned.
[I 2021-05-10 06:31:53,518] Trial 135 pruned.
[I 2021-05-10 06:31:54,036] Trial 136 pruned.
[I 2021-05-10 06:31:54,562] Trial 137 pruned.
[I 2021-05-10 06:31:55,585] Trial 138 pruned.
[I 2021-05-10 06:31:55,662] Trial 139 pruned.
[I 2021-05-10 06:32:47,618] Trial 140 finished with value: 368.9266662597656 and parameters: {'lr': 0.003742506913809314, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:32:48,141] Trial 141 pruned.
[I 2021-05-10 06:32:48,669] Trial 142 pruned.
[I 2021-05-10 06:32:49,192] Trial 143 pruned.
[I 2021-05-10 06:32:50,795] Trial 144 pruned.
[I 2021-05-10 06:32:51,879] Trial 145 pruned.
[I 2021-05-10 06:32:52,974] Trial 146 pruned.
[I 2021-05-10 06:33:43,774] Trial 147 pruned.
[I 2021-05-10 06:33:44,805] Trial 148 pruned.
[I 2021-05-10 06:33:44,940] Trial 149 pruned.
[I 2021-05-10 06:33:45,206] Trial 150 pruned.
[I 2021-05-10 06:33:45,716] Trial 151 pruned.
[I 2021-05-10 06:33:46,223] Trial 152 pruned.
[I 2021-05-10 06:33:46,738] Trial 153 pruned.
[I 2021-05-10 06:33:47,251] Trial 154 pruned.
[I 2021-05-10 06:33:47,758] Trial 155 pruned.
[I 2021-05-10 06:34:38,472] Trial 156 finished with value: 382.3679504394531 and parameters: {'lr': 0.0034728020995216744, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:34:38,985] Trial 157 pruned.
[I 2021-05-10 06:34:39,483] Trial 158 pruned.
[I 2021-05-10 06:34:41,007] Trial 159 pruned.
[I 2021-05-10 06:34:41,079] Trial 160 pruned.
[I 2021-05-10 06:34:41,592] Trial 161 pruned.
[I 2021-05-10 06:34:42,620] Trial 162 pruned.
[I 2021-05-10 06:34:43,130] Trial 163 pruned.
[I 2021-05-10 06:34:44,634] Trial 164 pruned.
[I 2021-05-10 06:34:45,149] Trial 165 pruned.
[I 2021-05-10 06:34:46,701] Trial 166 pruned.
[I 2021-05-10 06:34:47,225] Trial 167 pruned.
[I 2021-05-10 06:34:51,319] Trial 168 pruned.
[I 2021-05-10 06:35:42,865] Trial 169 finished with value: 386.02142333984375 and parameters: {'lr': 0.0037052087174824127, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:35:43,385] Trial 170 pruned.
[I 2021-05-10 06:35:43,901] Trial 171 pruned.
[I 2021-05-10 06:35:44,422] Trial 172 pruned.
[I 2021-05-10 06:35:44,945] Trial 173 pruned.
[I 2021-05-10 06:35:45,462] Trial 174 pruned.
[I 2021-05-10 06:35:45,982] Trial 175 pruned.
[I 2021-05-10 06:35:50,032] Trial 176 pruned.
[I 2021-05-10 06:35:50,298] Trial 177 pruned.
[I 2021-05-10 06:35:50,801] Trial 178 pruned.
[I 2021-05-10 06:35:50,875] Trial 179 pruned.
[I 2021-05-10 06:35:51,909] Trial 180 pruned.
[I 2021-05-10 06:35:51,983] Trial 181 pruned.
[I 2021-05-10 06:35:52,056] Trial 182 pruned.
[I 2021-05-10 06:35:52,128] Trial 183 pruned.
[I 2021-05-10 06:35:52,202] Trial 184 pruned.
[I 2021-05-10 06:35:52,699] Trial 185 pruned.
[I 2021-05-10 06:35:53,214] Trial 186 pruned.
[I 2021-05-10 06:35:53,349] Trial 187 pruned.
[I 2021-05-10 06:35:57,448] Trial 188 pruned.
[I 2021-05-10 06:35:57,971] Trial 189 pruned.
[I 2021-05-10 06:35:58,044] Trial 190 pruned.
[I 2021-05-10 06:35:58,116] Trial 191 pruned.
[I 2021-05-10 06:35:58,190] Trial 192 pruned.
[I 2021-05-10 06:35:58,263] Trial 193 pruned.
[I 2021-05-10 06:35:58,336] Trial 194 pruned.
[I 2021-05-10 06:35:58,851] Trial 195 pruned.
[I 2021-05-10 06:35:59,619] Trial 196 pruned.
[I 2021-05-10 06:36:00,134] Trial 197 pruned.
[I 2021-05-10 06:36:01,653] Trial 198 pruned.
[I 2021-05-10 06:36:02,180] Trial 199 pruned.
[I 2021-05-10 06:36:02,255] Trial 200 pruned.
[I 2021-05-10 06:36:02,518] Trial 201 pruned.
[I 2021-05-10 06:36:03,030] Trial 202 pruned.
[I 2021-05-10 06:36:03,290] Trial 203 pruned.
[I 2021-05-10 06:36:03,553] Trial 204 pruned.
[I 2021-05-10 06:36:03,812] Trial 205 pruned.
[I 2021-05-10 06:36:04,325] Trial 206 pruned.
[I 2021-05-10 06:36:04,848] Trial 207 pruned.
[I 2021-05-10 06:36:05,359] Trial 208 pruned.
[I 2021-05-10 06:36:05,856] Trial 209 pruned.
[I 2021-05-10 06:36:06,376] Trial 210 pruned.
[I 2021-05-10 06:36:06,511] Trial 211 pruned.
[I 2021-05-10 06:36:06,645] Trial 212 pruned.
[I 2021-05-10 06:36:06,780] Trial 213 pruned.
[I 2021-05-10 06:36:06,914] Trial 214 pruned.
[I 2021-05-10 06:36:07,438] Trial 215 pruned.
[I 2021-05-10 06:36:07,569] Trial 216 pruned.
[I 2021-05-10 06:36:07,832] Trial 217 pruned.
[I 2021-05-10 06:36:58,558] Trial 218 finished with value: 370.57861328125 and parameters: {'lr': 0.0038956176655396513, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:37:00,078] Trial 219 pruned.
[I 2021-05-10 06:37:00,586] Trial 220 pruned.
[I 2021-05-10 06:37:01,094] Trial 221 pruned.
[I 2021-05-10 06:37:01,605] Trial 222 pruned.
[I 2021-05-10 06:37:52,307] Trial 223 finished with value: 369.4765625 and parameters: {'lr': 0.003919353564919839, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:37:52,814] Trial 224 pruned.
[I 2021-05-10 06:37:53,324] Trial 225 pruned.
[I 2021-05-10 06:37:53,832] Trial 226 pruned.
[I 2021-05-10 06:37:54,343] Trial 227 pruned.
[I 2021-05-10 06:37:54,850] Trial 228 pruned.
[I 2021-05-10 06:37:54,924] Trial 229 pruned.
[I 2021-05-10 06:37:55,438] Trial 230 pruned.
[I 2021-05-10 06:38:46,848] Trial 231 finished with value: 363.45135498046875 and parameters: {'lr': 0.0042927589775429665, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:38:47,372] Trial 232 pruned.
[I 2021-05-10 06:38:47,892] Trial 233 pruned.
[I 2021-05-10 06:39:39,391] Trial 234 finished with value: 407.3536071777344 and parameters: {'lr': 0.004553675403982262, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:39:39,899] Trial 235 pruned.
[I 2021-05-10 06:39:40,420] Trial 236 pruned.
[I 2021-05-10 06:39:40,933] Trial 237 pruned.
[I 2021-05-10 06:39:41,452] Trial 238 pruned.
[I 2021-05-10 06:39:41,968] Trial 239 pruned.
[I 2021-05-10 06:39:42,489] Trial 240 pruned.
[I 2021-05-10 06:39:43,006] Trial 241 pruned.
[I 2021-05-10 06:39:43,525] Trial 242 pruned.
[I 2021-05-10 06:39:44,045] Trial 243 pruned.
[I 2021-05-10 06:40:35,486] Trial 244 finished with value: 380.7868347167969 and parameters: {'lr': 0.003936927419224988, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:40:35,563] Trial 245 pruned.
[I 2021-05-10 06:40:36,071] Trial 246 pruned.
[I 2021-05-10 06:40:36,578] Trial 247 pruned.
[I 2021-05-10 06:40:37,093] Trial 248 pruned.
[I 2021-05-10 06:40:37,609] Trial 249 pruned.
[I 2021-05-10 06:40:37,871] Trial 250 pruned.
[I 2021-05-10 06:40:38,390] Trial 251 pruned.
[I 2021-05-10 06:40:38,466] Trial 252 pruned.
[I 2021-05-10 06:40:40,000] Trial 253 pruned.
[I 2021-05-10 06:40:40,516] Trial 254 pruned.
[I 2021-05-10 06:40:41,033] Trial 255 pruned.
[I 2021-05-10 06:40:41,555] Trial 256 pruned.
[I 2021-05-10 06:40:41,816] Trial 257 pruned.
[I 2021-05-10 06:41:33,295] Trial 258 finished with value: 382.1874084472656 and parameters: {'lr': 0.006518517989269028, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:41:33,816] Trial 259 pruned.
[I 2021-05-10 06:41:34,328] Trial 260 pruned.
[I 2021-05-10 06:41:34,846] Trial 261 pruned.
[I 2021-05-10 06:41:35,367] Trial 262 pruned.
[I 2021-05-10 06:41:35,887] Trial 263 pruned.
[I 2021-05-10 06:41:35,963] Trial 264 pruned.
[I 2021-05-10 06:41:36,482] Trial 265 pruned.
[I 2021-05-10 06:41:37,000] Trial 266 pruned.
[I 2021-05-10 06:41:37,505] Trial 267 pruned.
[I 2021-05-10 06:41:42,642] Trial 268 pruned.
[I 2021-05-10 06:41:43,164] Trial 269 pruned.
[I 2021-05-10 06:41:43,684] Trial 270 pruned.
[I 2021-05-10 06:41:43,760] Trial 271 pruned.
[I 2021-05-10 06:41:44,268] Trial 272 pruned.
[I 2021-05-10 06:41:44,775] Trial 273 pruned.
[I 2021-05-10 06:41:45,288] Trial 274 pruned.
[I 2021-05-10 06:41:45,796] Trial 275 pruned.
[I 2021-05-10 06:41:46,304] Trial 276 pruned.
[I 2021-05-10 06:41:46,381] Trial 277 pruned.
[I 2021-05-10 06:41:47,922] Trial 278 pruned.
[I 2021-05-10 06:41:48,436] Trial 279 pruned.
[I 2021-05-10 06:41:48,950] Trial 280 pruned.
[I 2021-05-10 06:41:49,472] Trial 281 pruned.
[I 2021-05-10 06:41:49,991] Trial 282 pruned.
[I 2021-05-10 06:42:40,677] Trial 283 finished with value: 378.3438415527344 and parameters: {'lr': 0.004453971214620654, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:42:42,206] Trial 284 pruned.
[I 2021-05-10 06:42:42,728] Trial 285 pruned.
[I 2021-05-10 06:42:43,252] Trial 286 pruned.
[I 2021-05-10 06:42:44,770] Trial 287 pruned.
[I 2021-05-10 06:42:45,292] Trial 288 pruned.
[I 2021-05-10 06:42:45,791] Trial 289 pruned.
[I 2021-05-10 06:43:37,570] Trial 290 finished with value: 462.416015625 and parameters: {'lr': 0.0044524316637840145, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:43:38,085] Trial 291 pruned.
[I 2021-05-10 06:43:38,609] Trial 292 pruned.
[I 2021-05-10 06:43:39,134] Trial 293 pruned.
[I 2021-05-10 06:43:39,643] Trial 294 pruned.
[I 2021-05-10 06:43:40,159] Trial 295 pruned.
[I 2021-05-10 06:43:40,671] Trial 296 pruned.
[I 2021-05-10 06:43:41,196] Trial 297 pruned.
[I 2021-05-10 06:43:41,717] Trial 298 pruned.
[I 2021-05-10 06:43:42,226] Trial 299 pruned.
[I 2021-05-10 06:43:42,738] Trial 300 pruned.
[I 2021-05-10 06:43:43,252] Trial 301 pruned.
[I 2021-05-10 06:43:43,772] Trial 302 pruned.
[I 2021-05-10 06:43:44,283] Trial 303 pruned.
[I 2021-05-10 06:43:44,795] Trial 304 pruned.
[I 2021-05-10 06:43:45,317] Trial 305 pruned.
[I 2021-05-10 06:43:45,826] Trial 306 pruned.
[I 2021-05-10 06:43:46,343] Trial 307 pruned.
[I 2021-05-10 06:43:46,864] Trial 308 pruned.
[I 2021-05-10 06:43:47,377] Trial 309 pruned.
[I 2021-05-10 06:43:47,900] Trial 310 pruned.
[I 2021-05-10 06:43:48,415] Trial 311 pruned.
[I 2021-05-10 06:43:48,937] Trial 312 pruned.
[I 2021-05-10 06:43:49,449] Trial 313 pruned.
[I 2021-05-10 06:43:49,965] Trial 314 pruned.
[I 2021-05-10 06:43:50,484] Trial 315 pruned.
[I 2021-05-10 06:43:50,992] Trial 316 pruned.
[I 2021-05-10 06:43:51,502] Trial 317 pruned.
[I 2021-05-10 06:43:52,010] Trial 318 pruned.
[I 2021-05-10 06:43:52,531] Trial 319 pruned.
[I 2021-05-10 06:43:53,057] Trial 320 pruned.
[I 2021-05-10 06:43:53,575] Trial 321 pruned.
[I 2021-05-10 06:43:53,652] Trial 322 pruned.
[I 2021-05-10 06:43:54,162] Trial 323 pruned.
[I 2021-05-10 06:43:54,686] Trial 324 pruned.
[I 2021-05-10 06:43:55,210] Trial 325 pruned.
[I 2021-05-10 06:43:55,717] Trial 326 pruned.
[I 2021-05-10 06:43:56,230] Trial 327 pruned.
[I 2021-05-10 06:43:56,738] Trial 328 pruned.
[I 2021-05-10 06:43:58,293] Trial 329 pruned.
[I 2021-05-10 06:43:58,556] Trial 330 pruned.
[I 2021-05-10 06:43:58,630] Trial 331 pruned.
[I 2021-05-10 06:43:59,151] Trial 332 pruned.
[I 2021-05-10 06:43:59,663] Trial 333 pruned.
[I 2021-05-10 06:44:00,189] Trial 334 pruned.
[I 2021-05-10 06:44:00,712] Trial 335 pruned.
[I 2021-05-10 06:44:01,220] Trial 336 pruned.
[I 2021-05-10 06:44:01,741] Trial 337 pruned.
[I 2021-05-10 06:44:02,253] Trial 338 pruned.
[I 2021-05-10 06:44:02,767] Trial 339 pruned.
[I 2021-05-10 06:44:02,845] Trial 340 pruned.
[I 2021-05-10 06:44:03,343] Trial 341 pruned.
[I 2021-05-10 06:44:03,861] Trial 342 pruned.
[I 2021-05-10 06:44:04,123] Trial 343 pruned.
[I 2021-05-10 06:44:04,632] Trial 344 pruned.
[I 2021-05-10 06:44:05,147] Trial 345 pruned.
[I 2021-05-10 06:44:05,668] Trial 346 pruned.
[I 2021-05-10 06:44:06,196] Trial 347 pruned.
[I 2021-05-10 06:44:06,700] Trial 348 pruned.
[I 2021-05-10 06:44:06,776] Trial 349 pruned.
[I 2021-05-10 06:44:07,285] Trial 350 pruned.
[I 2021-05-10 06:44:07,804] Trial 351 pruned.
[I 2021-05-10 06:44:08,320] Trial 352 pruned.
[I 2021-05-10 06:44:08,838] Trial 353 pruned.
[I 2021-05-10 06:44:09,360] Trial 354 pruned.
[I 2021-05-10 06:44:09,864] Trial 355 pruned.
[I 2021-05-10 06:44:10,383] Trial 356 pruned.
[I 2021-05-10 06:44:10,647] Trial 357 pruned.
[I 2021-05-10 06:44:10,784] Trial 358 pruned.
[I 2021-05-10 06:44:11,307] Trial 359 pruned.
[I 2021-05-10 06:44:11,384] Trial 360 pruned.
[I 2021-05-10 06:44:11,906] Trial 361 pruned.
[I 2021-05-10 06:44:12,425] Trial 362 pruned.
[I 2021-05-10 06:44:12,942] Trial 363 pruned.
[I 2021-05-10 06:44:13,464] Trial 364 pruned.
[I 2021-05-10 06:44:14,978] Trial 365 pruned.
[I 2021-05-10 06:44:15,489] Trial 366 pruned.
[I 2021-05-10 06:44:16,013] Trial 367 pruned.
[I 2021-05-10 06:44:16,523] Trial 368 pruned.
[I 2021-05-10 06:44:16,599] Trial 369 pruned.
[I 2021-05-10 06:44:18,125] Trial 370 pruned.
[I 2021-05-10 06:44:18,648] Trial 371 pruned.
[I 2021-05-10 06:44:19,152] Trial 372 pruned.
[I 2021-05-10 06:44:19,421] Trial 373 pruned.
[I 2021-05-10 06:44:19,943] Trial 374 pruned.
[I 2021-05-10 06:44:20,445] Trial 375 pruned.
[I 2021-05-10 06:44:20,967] Trial 376 pruned.
[I 2021-05-10 06:44:21,479] Trial 377 pruned.
[I 2021-05-10 06:44:21,996] Trial 378 pruned.
[I 2021-05-10 06:44:22,074] Trial 379 pruned.
[I 2021-05-10 06:44:22,587] Trial 380 pruned.
[I 2021-05-10 06:44:23,111] Trial 381 pruned.
[I 2021-05-10 06:44:23,623] Trial 382 pruned.
[I 2021-05-10 06:44:24,137] Trial 383 pruned.
[I 2021-05-10 06:44:24,645] Trial 384 pruned.
[I 2021-05-10 06:44:25,143] Trial 385 pruned.
[I 2021-05-10 06:44:25,411] Trial 386 pruned.
[I 2021-05-10 06:44:25,921] Trial 387 pruned.
[I 2021-05-10 06:44:26,061] Trial 388 pruned.
[I 2021-05-10 06:44:26,140] Trial 389 pruned.
[I 2021-05-10 06:44:26,650] Trial 390 pruned.
[I 2021-05-10 06:44:27,171] Trial 391 pruned.
[I 2021-05-10 06:44:27,694] Trial 392 pruned.
[I 2021-05-10 06:44:28,212] Trial 393 pruned.
[I 2021-05-10 06:44:28,735] Trial 394 pruned.
[I 2021-05-10 06:45:19,534] Trial 395 finished with value: 373.0365295410156 and parameters: {'lr': 0.006809828863347891, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:45:20,051] Trial 396 pruned.
[I 2021-05-10 06:45:20,580] Trial 397 pruned.
[I 2021-05-10 06:45:21,092] Trial 398 pruned.
[I 2021-05-10 06:45:21,612] Trial 399 pruned.
[I 2021-05-10 06:45:22,120] Trial 400 pruned.
[I 2021-05-10 06:45:22,630] Trial 401 pruned.
[I 2021-05-10 06:45:24,181] Trial 402 pruned.
[I 2021-05-10 06:45:24,690] Trial 403 pruned.
[I 2021-05-10 06:45:25,207] Trial 404 pruned.
[I 2021-05-10 06:45:25,719] Trial 405 pruned.
[I 2021-05-10 06:45:26,239] Trial 406 pruned.
[I 2021-05-10 06:45:26,761] Trial 407 pruned.
[I 2021-05-10 06:45:27,268] Trial 408 pruned.
[I 2021-05-10 06:45:27,537] Trial 409 pruned.
[I 2021-05-10 06:45:28,046] Trial 410 pruned.
[I 2021-05-10 06:45:28,567] Trial 411 pruned.
[I 2021-05-10 06:45:29,092] Trial 412 pruned.
[I 2021-05-10 06:45:29,593] Trial 413 pruned.
[I 2021-05-10 06:45:30,107] Trial 414 pruned.
[I 2021-05-10 06:45:30,615] Trial 415 pruned.
[I 2021-05-10 06:45:31,137] Trial 416 pruned.
[I 2021-05-10 06:45:31,655] Trial 417 pruned.
[I 2021-05-10 06:45:32,171] Trial 418 pruned.
[I 2021-05-10 06:45:32,698] Trial 419 pruned.
[I 2021-05-10 06:45:33,212] Trial 420 pruned.
[I 2021-05-10 06:45:33,733] Trial 421 pruned.
[I 2021-05-10 06:45:33,999] Trial 422 pruned.
[I 2021-05-10 06:45:34,510] Trial 423 pruned.
[I 2021-05-10 06:45:35,033] Trial 424 pruned.
[I 2021-05-10 06:45:35,178] Trial 425 pruned.
[I 2021-05-10 06:45:35,698] Trial 426 pruned.
[I 2021-05-10 06:45:36,215] Trial 427 pruned.
[I 2021-05-10 06:45:36,734] Trial 428 pruned.
[I 2021-05-10 06:45:37,260] Trial 429 pruned.
[I 2021-05-10 06:46:27,940] Trial 430 finished with value: 371.4259033203125 and parameters: {'lr': 0.00457034763965898, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:46:28,455] Trial 431 pruned.
[I 2021-05-10 06:46:28,970] Trial 432 pruned.
[I 2021-05-10 06:46:29,496] Trial 433 pruned.
[I 2021-05-10 06:47:20,246] Trial 434 finished with value: 381.4234313964844 and parameters: {'lr': 0.004644343109151167, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:47:20,756] Trial 435 pruned.
[I 2021-05-10 06:47:22,270] Trial 436 pruned.
[I 2021-05-10 06:47:22,781] Trial 437 pruned.
[I 2021-05-10 06:47:23,047] Trial 438 pruned.
[I 2021-05-10 06:47:23,557] Trial 439 pruned.
[I 2021-05-10 06:47:24,072] Trial 440 pruned.
[I 2021-05-10 06:47:24,589] Trial 441 pruned.
[I 2021-05-10 06:47:25,092] Trial 442 pruned.
[I 2021-05-10 06:47:25,595] Trial 443 pruned.
[I 2021-05-10 06:47:26,106] Trial 444 pruned.
[I 2021-05-10 06:47:26,619] Trial 445 pruned.
[I 2021-05-10 06:47:27,128] Trial 446 pruned.
[I 2021-05-10 06:47:27,636] Trial 447 pruned.
[I 2021-05-10 06:47:28,146] Trial 448 pruned.
[I 2021-05-10 06:47:28,665] Trial 449 pruned.
[I 2021-05-10 06:47:29,179] Trial 450 pruned.
[I 2021-05-10 06:47:29,444] Trial 451 pruned.
[I 2021-05-10 06:48:21,057] Trial 452 finished with value: 382.04193115234375 and parameters: {'lr': 0.006800354742992084, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:48:21,583] Trial 453 pruned.
[I 2021-05-10 06:48:22,100] Trial 454 pruned.
[I 2021-05-10 06:48:22,243] Trial 455 pruned.
[I 2021-05-10 06:48:33,384] Trial 456 pruned.
[I 2021-05-10 06:48:33,908] Trial 457 pruned.
[I 2021-05-10 06:48:34,410] Trial 458 pruned.
[I 2021-05-10 06:48:34,932] Trial 459 pruned.
[I 2021-05-10 06:48:35,452] Trial 460 pruned.
[I 2021-05-10 06:48:35,959] Trial 461 pruned.
[I 2021-05-10 06:48:36,479] Trial 462 pruned.
[I 2021-05-10 06:48:36,991] Trial 463 pruned.
[I 2021-05-10 06:48:37,514] Trial 464 pruned.
[I 2021-05-10 06:48:38,024] Trial 465 pruned.
[I 2021-05-10 06:48:39,568] Trial 466 pruned.
[I 2021-05-10 06:48:40,354] Trial 467 pruned.
[I 2021-05-10 06:48:40,873] Trial 468 pruned.
[I 2021-05-10 06:48:41,396] Trial 469 pruned.
[I 2021-05-10 06:48:41,896] Trial 470 pruned.
[I 2021-05-10 06:48:42,410] Trial 471 pruned.
[I 2021-05-10 06:48:42,921] Trial 472 pruned.
[I 2021-05-10 06:48:43,445] Trial 473 pruned.
[I 2021-05-10 06:48:43,969] Trial 474 pruned.
[I 2021-05-10 06:48:44,485] Trial 475 pruned.
[I 2021-05-10 06:48:45,006] Trial 476 pruned.
[I 2021-05-10 06:48:45,517] Trial 477 pruned.
[I 2021-05-10 06:48:46,045] Trial 478 pruned.
[I 2021-05-10 06:48:46,558] Trial 479 pruned.
[I 2021-05-10 06:48:46,829] Trial 480 pruned.
[I 2021-05-10 06:48:47,343] Trial 481 pruned.
[I 2021-05-10 06:49:38,911] Trial 482 finished with value: 369.0059509277344 and parameters: {'lr': 0.005220263103371839, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:49:39,436] Trial 483 pruned.
[I 2021-05-10 06:49:39,962] Trial 484 pruned.
[I 2021-05-10 06:49:40,488] Trial 485 pruned.
[I 2021-05-10 06:49:40,998] Trial 486 pruned.
[I 2021-05-10 06:49:41,523] Trial 487 pruned.
[I 2021-05-10 06:49:42,046] Trial 488 pruned.
[I 2021-05-10 06:49:42,564] Trial 489 pruned.
[I 2021-05-10 06:49:43,090] Trial 490 pruned.
[I 2021-05-10 06:49:43,613] Trial 491 pruned.
[I 2021-05-10 06:49:43,756] Trial 492 pruned.
[I 2021-05-10 06:49:44,284] Trial 493 pruned.
[I 2021-05-10 06:49:44,811] Trial 494 pruned.
[I 2021-05-10 06:49:45,332] Trial 495 pruned.
[I 2021-05-10 06:49:45,855] Trial 496 pruned.
[I 2021-05-10 06:49:46,381] Trial 497 pruned.
[I 2021-05-10 06:49:46,898] Trial 498 pruned.
[I 2021-05-10 06:49:47,411] Trial 499 pruned.
[I 2021-05-10 06:49:47,492] Trial 500 pruned.
[I 2021-05-10 06:49:48,015] Trial 501 pruned.
[I 2021-05-10 06:49:48,538] Trial 502 pruned.
[I 2021-05-10 06:49:49,065] Trial 503 pruned.
[I 2021-05-10 06:49:49,589] Trial 504 pruned.
[I 2021-05-10 06:49:50,116] Trial 505 pruned.
[I 2021-05-10 06:49:50,627] Trial 506 pruned.
[I 2021-05-10 06:49:51,139] Trial 507 pruned.
[I 2021-05-10 06:50:42,659] Trial 508 finished with value: 373.9319763183594 and parameters: {'lr': 0.0031226790555127274, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:50:43,178] Trial 509 pruned.
[I 2021-05-10 06:50:43,699] Trial 510 pruned.
[I 2021-05-10 06:50:44,225] Trial 511 pruned.
[I 2021-05-10 06:50:44,738] Trial 512 pruned.
[I 2021-05-10 06:50:45,263] Trial 513 pruned.
[I 2021-05-10 06:50:45,342] Trial 514 pruned.
[I 2021-05-10 06:50:45,855] Trial 515 pruned.
[I 2021-05-10 06:50:46,377] Trial 516 pruned.
[I 2021-05-10 06:50:46,895] Trial 517 pruned.
[I 2021-05-10 06:50:47,432] Trial 518 pruned.
[I 2021-05-10 06:50:47,948] Trial 519 pruned.
[I 2021-05-10 06:50:48,471] Trial 520 pruned.
[I 2021-05-10 06:50:48,985] Trial 521 pruned.
[I 2021-05-10 06:50:49,513] Trial 522 pruned.
[I 2021-05-10 06:50:50,040] Trial 523 pruned.
[I 2021-05-10 06:50:50,554] Trial 524 pruned.
[I 2021-05-10 06:50:51,082] Trial 525 pruned.
[I 2021-05-10 06:50:51,587] Trial 526 pruned.
[I 2021-05-10 06:50:51,732] Trial 527 pruned.
[I 2021-05-10 06:50:52,251] Trial 528 pruned.
[I 2021-05-10 06:50:52,331] Trial 529 pruned.
[I 2021-05-10 06:50:52,862] Trial 530 pruned.
[I 2021-05-10 06:50:53,374] Trial 531 pruned.
[I 2021-05-10 06:50:53,902] Trial 532 pruned.
[I 2021-05-10 06:50:54,423] Trial 533 pruned.
[I 2021-05-10 06:50:54,951] Trial 534 pruned.
[I 2021-05-10 06:50:55,469] Trial 535 pruned.
[I 2021-05-10 06:51:46,129] Trial 536 finished with value: 378.9613037109375 and parameters: {'lr': 0.009178582554096572, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:51:46,649] Trial 537 pruned.
[I 2021-05-10 06:51:47,162] Trial 538 pruned.
[I 2021-05-10 06:51:47,675] Trial 539 pruned.
[I 2021-05-10 06:51:48,193] Trial 540 pruned.
[I 2021-05-10 06:51:48,711] Trial 541 pruned.
[I 2021-05-10 06:51:49,217] Trial 542 pruned.
[I 2021-05-10 06:51:49,734] Trial 543 pruned.
[I 2021-05-10 06:51:49,813] Trial 544 pruned.
[I 2021-05-10 06:51:50,328] Trial 545 pruned.
[I 2021-05-10 06:51:50,842] Trial 546 pruned.
[I 2021-05-10 06:51:51,354] Trial 547 pruned.
[I 2021-05-10 06:51:51,871] Trial 548 pruned.
[I 2021-05-10 06:51:52,387] Trial 549 pruned.
[I 2021-05-10 06:51:52,901] Trial 550 pruned.
[I 2021-05-10 06:51:53,414] Trial 551 pruned.
[I 2021-05-10 06:51:53,556] Trial 552 pruned.
[I 2021-05-10 06:51:54,069] Trial 553 pruned.
[I 2021-05-10 06:51:54,581] Trial 554 pruned.
[I 2021-05-10 06:51:55,094] Trial 555 pruned.
[I 2021-05-10 06:52:46,497] Trial 556 finished with value: 370.6944885253906 and parameters: {'lr': 0.0052839518424413175, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:52:47,017] Trial 557 pruned.
[I 2021-05-10 06:52:47,534] Trial 558 pruned.
[I 2021-05-10 06:52:47,616] Trial 559 pruned.
[I 2021-05-10 06:52:48,144] Trial 560 pruned.
[I 2021-05-10 06:52:48,668] Trial 561 pruned.
[I 2021-05-10 06:52:49,191] Trial 562 pruned.
[I 2021-05-10 06:52:49,707] Trial 563 pruned.
[I 2021-05-10 06:52:50,236] Trial 564 pruned.
[I 2021-05-10 06:52:50,751] Trial 565 pruned.
[I 2021-05-10 06:52:51,277] Trial 566 pruned.
[I 2021-05-10 06:52:51,804] Trial 567 pruned.
[I 2021-05-10 06:52:52,326] Trial 568 pruned.
[I 2021-05-10 06:52:52,864] Trial 569 pruned.
[I 2021-05-10 06:52:53,371] Trial 570 pruned.
[I 2021-05-10 06:52:53,900] Trial 571 pruned.
[I 2021-05-10 06:52:53,980] Trial 572 pruned.
[I 2021-05-10 06:52:54,510] Trial 573 pruned.
[I 2021-05-10 06:52:55,036] Trial 574 pruned.
[I 2021-05-10 06:52:55,560] Trial 575 pruned.
[I 2021-05-10 06:52:56,081] Trial 576 pruned.
[I 2021-05-10 06:52:56,594] Trial 577 pruned.
[I 2021-05-10 06:52:57,119] Trial 578 pruned.
[I 2021-05-10 06:52:57,630] Trial 579 pruned.
[I 2021-05-10 06:52:58,159] Trial 580 pruned.
[I 2021-05-10 06:52:58,303] Trial 581 pruned.
[I 2021-05-10 06:52:58,827] Trial 582 pruned.
[I 2021-05-10 06:52:59,355] Trial 583 pruned.
[I 2021-05-10 06:52:59,863] Trial 584 pruned.
[I 2021-05-10 06:53:00,387] Trial 585 pruned.
[I 2021-05-10 06:53:00,901] Trial 586 pruned.
[I 2021-05-10 06:53:00,982] Trial 587 pruned.
[I 2021-05-10 06:53:01,498] Trial 588 pruned.
[I 2021-05-10 06:53:02,025] Trial 589 pruned.
[I 2021-05-10 06:53:03,554] Trial 590 pruned.
[I 2021-05-10 06:53:04,077] Trial 591 pruned.
[I 2021-05-10 06:53:04,602] Trial 592 pruned.
[I 2021-05-10 06:53:05,121] Trial 593 pruned.
[I 2021-05-10 06:53:05,642] Trial 594 pruned.
[I 2021-05-10 06:53:06,144] Trial 595 pruned.
[I 2021-05-10 06:53:06,670] Trial 596 pruned.
[I 2021-05-10 06:53:07,194] Trial 597 pruned.
[I 2021-05-10 06:53:07,724] Trial 598 pruned.
[I 2021-05-10 06:53:08,234] Trial 599 pruned.
[I 2021-05-10 06:53:08,762] Trial 600 pruned.
[I 2021-05-10 06:53:08,842] Trial 601 pruned.
[I 2021-05-10 06:53:09,358] Trial 602 pruned.
[I 2021-05-10 06:53:09,887] Trial 603 pruned.
[I 2021-05-10 06:53:10,408] Trial 604 pruned.
[I 2021-05-10 06:53:10,931] Trial 605 pruned.
[I 2021-05-10 06:53:11,444] Trial 606 pruned.
[I 2021-05-10 06:53:11,967] Trial 607 pruned.
[I 2021-05-10 06:53:12,481] Trial 608 pruned.
[I 2021-05-10 06:53:12,626] Trial 609 pruned.
[I 2021-05-10 06:53:13,150] Trial 610 pruned.
[I 2021-05-10 06:53:13,663] Trial 611 pruned.
[I 2021-05-10 06:53:14,183] Trial 612 pruned.
[I 2021-05-10 06:54:04,858] Trial 613 finished with value: 381.9510498046875 and parameters: {'lr': 0.003093318465150097, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:54:05,373] Trial 614 pruned.
[I 2021-05-10 06:54:05,454] Trial 615 pruned.
[I 2021-05-10 06:54:05,968] Trial 616 pruned.
[I 2021-05-10 06:54:06,487] Trial 617 pruned.
[I 2021-05-10 06:54:07,005] Trial 618 pruned.
[I 2021-05-10 06:54:07,521] Trial 619 pruned.
[I 2021-05-10 06:54:08,031] Trial 620 pruned.
[I 2021-05-10 06:54:08,548] Trial 621 pruned.
[I 2021-05-10 06:54:09,066] Trial 622 pruned.
[I 2021-05-10 06:54:09,570] Trial 623 pruned.
[I 2021-05-10 06:54:10,085] Trial 624 pruned.
[I 2021-05-10 06:54:10,594] Trial 625 pruned.
[I 2021-05-10 06:54:11,111] Trial 626 pruned.
[I 2021-05-10 06:54:11,626] Trial 627 pruned.
[I 2021-05-10 06:54:12,145] Trial 628 pruned.
[I 2021-05-10 06:54:12,658] Trial 629 pruned.
[I 2021-05-10 06:54:12,739] Trial 630 pruned.
[I 2021-05-10 06:54:13,251] Trial 631 pruned.
[I 2021-05-10 06:54:13,770] Trial 632 pruned.
[I 2021-05-10 06:54:14,283] Trial 633 pruned.
[I 2021-05-10 06:54:14,799] Trial 634 pruned.
[I 2021-05-10 06:54:15,319] Trial 635 pruned.
[I 2021-05-10 06:54:15,832] Trial 636 pruned.
[I 2021-05-10 06:54:16,356] Trial 637 pruned.
[I 2021-05-10 06:54:16,870] Trial 638 pruned.
[I 2021-05-10 06:54:17,390] Trial 639 pruned.
[I 2021-05-10 06:54:17,534] Trial 640 pruned.
[I 2021-05-10 06:54:18,071] Trial 641 pruned.
[I 2021-05-10 06:54:18,588] Trial 642 pruned.
[I 2021-05-10 06:54:19,118] Trial 643 pruned.
[I 2021-05-10 06:54:19,200] Trial 644 pruned.
[I 2021-05-10 06:54:19,714] Trial 645 pruned.
[I 2021-05-10 06:54:20,245] Trial 646 pruned.
[I 2021-05-10 06:54:20,765] Trial 647 pruned.
[I 2021-05-10 06:54:21,291] Trial 648 pruned.
[I 2021-05-10 06:54:22,826] Trial 649 pruned.
[I 2021-05-10 06:54:23,354] Trial 650 pruned.
[I 2021-05-10 06:54:23,862] Trial 651 pruned.
[I 2021-05-10 06:54:24,385] Trial 652 pruned.
[I 2021-05-10 06:54:24,901] Trial 653 pruned.
[I 2021-05-10 06:54:25,429] Trial 654 pruned.
[I 2021-05-10 06:54:25,944] Trial 655 pruned.
[I 2021-05-10 06:54:26,468] Trial 656 pruned.
[I 2021-05-10 06:54:26,550] Trial 657 pruned.
[I 2021-05-10 06:54:27,078] Trial 658 pruned.
[I 2021-05-10 06:54:27,592] Trial 659 pruned.
[I 2021-05-10 06:54:28,118] Trial 660 pruned.
[I 2021-05-10 06:54:28,631] Trial 661 pruned.
[I 2021-05-10 06:54:29,162] Trial 662 pruned.
[I 2021-05-10 06:54:29,685] Trial 663 pruned.
[I 2021-05-10 06:54:30,201] Trial 664 pruned.
[I 2021-05-10 06:54:30,725] Trial 665 pruned.
[I 2021-05-10 06:54:31,233] Trial 666 pruned.
[I 2021-05-10 06:54:31,378] Trial 667 pruned.
[I 2021-05-10 06:54:31,895] Trial 668 pruned.
[I 2021-05-10 06:54:32,419] Trial 669 pruned.
[I 2021-05-10 06:54:33,950] Trial 670 pruned.
[I 2021-05-10 06:54:34,477] Trial 671 pruned.
[I 2021-05-10 06:54:34,996] Trial 672 pruned.
[I 2021-05-10 06:54:35,079] Trial 673 pruned.
[I 2021-05-10 06:54:35,599] Trial 674 pruned.
[I 2021-05-10 06:54:36,121] Trial 675 pruned.
[I 2021-05-10 06:54:36,633] Trial 676 pruned.
[I 2021-05-10 06:54:37,157] Trial 677 pruned.
[I 2021-05-10 06:54:37,670] Trial 678 pruned.
[I 2021-05-10 06:54:38,184] Trial 679 pruned.
[I 2021-05-10 06:54:38,701] Trial 680 pruned.
[I 2021-05-10 06:54:39,228] Trial 681 pruned.
[I 2021-05-10 06:54:39,737] Trial 682 pruned.
[I 2021-05-10 06:54:40,266] Trial 683 pruned.
[I 2021-05-10 06:54:40,790] Trial 684 pruned.
[I 2021-05-10 06:54:41,318] Trial 685 pruned.
[I 2021-05-10 06:54:41,399] Trial 686 pruned.
[I 2021-05-10 06:54:41,924] Trial 687 pruned.
[I 2021-05-10 06:54:42,438] Trial 688 pruned.
[I 2021-05-10 06:54:42,965] Trial 689 pruned.
[I 2021-05-10 06:54:43,487] Trial 690 pruned.
[I 2021-05-10 06:54:44,007] Trial 691 pruned.
[I 2021-05-10 06:54:44,532] Trial 692 pruned.
[I 2021-05-10 06:54:45,038] Trial 693 pruned.
[I 2021-05-10 06:54:45,584] Trial 694 pruned.
[I 2021-05-10 06:54:46,097] Trial 695 pruned.
[I 2021-05-10 06:54:46,623] Trial 696 pruned.
[I 2021-05-10 06:54:46,768] Trial 697 pruned.
[I 2021-05-10 06:54:47,297] Trial 698 pruned.
[I 2021-05-10 06:54:47,819] Trial 699 pruned.
[I 2021-05-10 06:54:47,903] Trial 700 pruned.
[I 2021-05-10 06:54:48,425] Trial 701 pruned.
[I 2021-05-10 06:54:48,953] Trial 702 pruned.
[I 2021-05-10 06:54:49,465] Trial 703 pruned.
[I 2021-05-10 06:54:49,990] Trial 704 pruned.
[I 2021-05-10 06:54:50,502] Trial 705 pruned.
[I 2021-05-10 06:54:51,031] Trial 706 pruned.
[I 2021-05-10 06:54:51,547] Trial 707 pruned.
[I 2021-05-10 06:54:51,816] Trial 708 pruned.
[I 2021-05-10 06:54:52,339] Trial 709 pruned.
[I 2021-05-10 06:54:52,871] Trial 710 pruned.
[I 2021-05-10 06:54:53,383] Trial 711 pruned.
[I 2021-05-10 06:54:54,950] Trial 712 pruned.
[I 2021-05-10 06:54:55,473] Trial 713 pruned.
[I 2021-05-10 06:54:55,998] Trial 714 pruned.
[I 2021-05-10 06:54:56,080] Trial 715 pruned.
[I 2021-05-10 06:54:56,604] Trial 716 pruned.
[I 2021-05-10 06:54:57,130] Trial 717 pruned.
[I 2021-05-10 06:54:57,647] Trial 718 pruned.
[I 2021-05-10 06:54:58,171] Trial 719 pruned.
[I 2021-05-10 06:54:58,683] Trial 720 pruned.
[I 2021-05-10 06:54:59,213] Trial 721 pruned.
[I 2021-05-10 06:54:59,732] Trial 722 pruned.
[I 2021-05-10 06:55:00,264] Trial 723 pruned.
[I 2021-05-10 06:55:00,773] Trial 724 pruned.
[I 2021-05-10 06:55:01,301] Trial 725 pruned.
[I 2021-05-10 06:55:01,445] Trial 726 pruned.
[I 2021-05-10 06:55:01,973] Trial 727 pruned.
[I 2021-05-10 06:55:02,489] Trial 728 pruned.
[I 2021-05-10 06:55:03,018] Trial 729 pruned.
[I 2021-05-10 06:55:03,100] Trial 730 pruned.
[I 2021-05-10 06:55:03,628] Trial 731 pruned.
[I 2021-05-10 06:55:04,146] Trial 732 pruned.
[I 2021-05-10 06:55:04,673] Trial 733 pruned.
[I 2021-05-10 06:55:05,174] Trial 734 pruned.
[I 2021-05-10 06:55:05,699] Trial 735 pruned.
[I 2021-05-10 06:55:06,217] Trial 736 pruned.
[I 2021-05-10 06:55:06,731] Trial 737 pruned.
[I 2021-05-10 06:55:07,253] Trial 738 pruned.
[I 2021-05-10 06:55:07,527] Trial 739 pruned.
[I 2021-05-10 06:55:08,042] Trial 740 pruned.
[I 2021-05-10 06:55:08,567] Trial 741 pruned.
[I 2021-05-10 06:55:09,085] Trial 742 pruned.
[I 2021-05-10 06:55:09,609] Trial 743 pruned.
[I 2021-05-10 06:55:09,692] Trial 744 pruned.
[I 2021-05-10 06:55:10,211] Trial 745 pruned.
[I 2021-05-10 06:55:10,742] Trial 746 pruned.
[I 2021-05-10 06:55:11,259] Trial 747 pruned.
[I 2021-05-10 06:55:11,776] Trial 748 pruned.
[I 2021-05-10 06:55:12,295] Trial 749 pruned.
[I 2021-05-10 06:55:12,816] Trial 750 pruned.
[I 2021-05-10 06:55:13,331] Trial 751 pruned.
[I 2021-05-10 06:55:13,857] Trial 752 pruned.
[I 2021-05-10 06:55:14,376] Trial 753 pruned.
[I 2021-05-10 06:55:14,900] Trial 754 pruned.
[I 2021-05-10 06:55:15,045] Trial 755 pruned.
[I 2021-05-10 06:55:15,576] Trial 756 pruned.
[I 2021-05-10 06:55:16,089] Trial 757 pruned.
[I 2021-05-10 06:55:16,613] Trial 758 pruned.
[I 2021-05-10 06:55:16,696] Trial 759 pruned.
[I 2021-05-10 06:55:17,212] Trial 760 pruned.
[I 2021-05-10 06:55:17,722] Trial 761 pruned.
[I 2021-05-10 06:55:18,250] Trial 762 pruned.
[I 2021-05-10 06:55:18,767] Trial 763 pruned.
[I 2021-05-10 06:56:10,055] Trial 764 finished with value: 426.1875915527344 and parameters: {'lr': 0.004288790618277832, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:56:10,573] Trial 765 pruned.
[I 2021-05-10 06:56:11,100] Trial 766 pruned.
[I 2021-05-10 06:56:11,368] Trial 767 pruned.
[I 2021-05-10 06:57:02,649] Trial 768 finished with value: 377.3831481933594 and parameters: {'lr': 0.005946469972758735, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:57:03,178] Trial 769 pruned.
[I 2021-05-10 06:57:03,710] Trial 770 pruned.
[I 2021-05-10 06:57:04,238] Trial 771 pruned.
[I 2021-05-10 06:57:04,766] Trial 772 pruned.
[I 2021-05-10 06:57:05,294] Trial 773 pruned.
[I 2021-05-10 06:57:05,379] Trial 774 pruned.
[I 2021-05-10 06:57:05,898] Trial 775 pruned.
[I 2021-05-10 06:57:06,427] Trial 776 pruned.
[I 2021-05-10 06:57:06,957] Trial 777 pruned.
[I 2021-05-10 06:57:07,482] Trial 778 pruned.
[I 2021-05-10 06:57:08,008] Trial 779 pruned.
[I 2021-05-10 06:57:08,528] Trial 780 pruned.
[I 2021-05-10 06:57:09,059] Trial 781 pruned.
[I 2021-05-10 06:57:09,590] Trial 782 pruned.
[I 2021-05-10 06:57:59,308] Trial 783 pruned.
[I 2021-05-10 06:57:59,840] Trial 784 pruned.
[I 2021-05-10 06:58:00,369] Trial 785 pruned.
[I 2021-05-10 06:58:00,518] Trial 786 pruned.
[I 2021-05-10 06:58:01,046] Trial 787 pruned.
[I 2021-05-10 06:58:01,130] Trial 788 pruned.
[I 2021-05-10 06:58:01,649] Trial 789 pruned.
[I 2021-05-10 06:58:02,174] Trial 790 pruned.
[I 2021-05-10 06:58:02,690] Trial 791 pruned.
[I 2021-05-10 06:58:03,223] Trial 792 pruned.
[I 2021-05-10 06:58:03,752] Trial 793 pruned.
[I 2021-05-10 06:58:04,277] Trial 794 pruned.
[I 2021-05-10 06:58:04,803] Trial 795 pruned.
[I 2021-05-10 06:58:05,339] Trial 796 pruned.
[I 2021-05-10 06:58:05,613] Trial 797 pruned.
[I 2021-05-10 06:58:06,138] Trial 798 pruned.
[I 2021-05-10 06:58:06,663] Trial 799 pruned.
[I 2021-05-10 06:58:07,191] Trial 800 pruned.
[I 2021-05-10 06:58:07,718] Trial 801 pruned.
[I 2021-05-10 06:58:08,232] Trial 802 pruned.
[I 2021-05-10 06:58:08,316] Trial 803 pruned.
[I 2021-05-10 06:58:08,841] Trial 804 pruned.
[I 2021-05-10 06:58:09,348] Trial 805 pruned.
[I 2021-05-10 06:58:09,875] Trial 806 pruned.
[I 2021-05-10 06:58:10,399] Trial 807 pruned.
[I 2021-05-10 06:58:10,927] Trial 808 pruned.
[I 2021-05-10 06:58:11,443] Trial 809 pruned.
[I 2021-05-10 06:58:11,971] Trial 810 pruned.
[I 2021-05-10 06:58:12,487] Trial 811 pruned.
[I 2021-05-10 06:58:13,014] Trial 812 pruned.
[I 2021-05-10 06:58:13,161] Trial 813 pruned.
[I 2021-05-10 06:58:13,687] Trial 814 pruned.
[I 2021-05-10 06:58:14,210] Trial 815 pruned.
[I 2021-05-10 06:58:14,723] Trial 816 pruned.
[I 2021-05-10 06:58:14,807] Trial 817 pruned.
[I 2021-05-10 06:58:15,326] Trial 818 pruned.
[I 2021-05-10 06:58:15,849] Trial 819 pruned.
[I 2021-05-10 06:58:16,366] Trial 820 pruned.
[I 2021-05-10 06:58:16,893] Trial 821 pruned.
[I 2021-05-10 06:58:17,411] Trial 822 pruned.
[I 2021-05-10 06:58:17,939] Trial 823 pruned.
[I 2021-05-10 06:58:18,458] Trial 824 pruned.
[I 2021-05-10 06:58:18,989] Trial 825 pruned.
[I 2021-05-10 06:58:19,265] Trial 826 pruned.
[I 2021-05-10 06:58:19,786] Trial 827 pruned.
[I 2021-05-10 06:58:20,314] Trial 828 pruned.
[I 2021-05-10 06:58:20,827] Trial 829 pruned.
[I 2021-05-10 06:58:21,352] Trial 830 pruned.
[I 2021-05-10 06:58:21,871] Trial 831 pruned.
[I 2021-05-10 06:58:22,386] Trial 832 pruned.
[I 2021-05-10 06:58:22,470] Trial 833 pruned.
[I 2021-05-10 06:58:23,000] Trial 834 pruned.
[I 2021-05-10 06:58:23,525] Trial 835 pruned.
[I 2021-05-10 06:58:24,039] Trial 836 pruned.
[I 2021-05-10 06:58:24,565] Trial 837 pruned.
[I 2021-05-10 06:58:25,088] Trial 838 pruned.
[I 2021-05-10 06:58:25,620] Trial 839 pruned.
[I 2021-05-10 06:58:26,141] Trial 840 pruned.
[I 2021-05-10 06:58:26,667] Trial 841 pruned.
[I 2021-05-10 06:58:27,188] Trial 842 pruned.
[I 2021-05-10 06:58:27,335] Trial 843 pruned.
[I 2021-05-10 06:58:27,865] Trial 844 pruned.
[I 2021-05-10 06:58:28,385] Trial 845 pruned.
[I 2021-05-10 06:58:28,905] Trial 846 pruned.
[I 2021-05-10 06:58:28,989] Trial 847 pruned.
[I 2021-05-10 06:58:29,514] Trial 848 pruned.
[I 2021-05-10 06:58:30,032] Trial 849 pruned.
[I 2021-05-10 06:58:30,557] Trial 850 pruned.
[I 2021-05-10 06:58:31,079] Trial 851 pruned.
[I 2021-05-10 06:58:31,623] Trial 852 pruned.
[I 2021-05-10 06:58:32,153] Trial 853 pruned.
[I 2021-05-10 06:58:32,668] Trial 854 pruned.
[I 2021-05-10 06:58:33,199] Trial 855 pruned.
[I 2021-05-10 06:58:33,463] Trial 856 pruned.
[I 2021-05-10 06:58:35,012] Trial 857 pruned.
[I 2021-05-10 06:58:35,530] Trial 858 pruned.
[I 2021-05-10 06:58:36,061] Trial 859 pruned.
[I 2021-05-10 06:58:36,568] Trial 860 pruned.
[I 2021-05-10 06:58:36,655] Trial 861 pruned.
[I 2021-05-10 06:58:37,182] Trial 862 pruned.
[I 2021-05-10 06:58:37,702] Trial 863 pruned.
[I 2021-05-10 06:58:38,234] Trial 864 pruned.
[I 2021-05-10 06:58:38,750] Trial 865 pruned.
[I 2021-05-10 06:58:39,279] Trial 866 pruned.
[I 2021-05-10 06:58:39,797] Trial 867 pruned.
[I 2021-05-10 06:58:40,326] Trial 868 pruned.
[I 2021-05-10 06:58:41,863] Trial 869 pruned.
[I 2021-05-10 06:58:42,392] Trial 870 pruned.
[I 2021-05-10 06:58:42,900] Trial 871 pruned.
[I 2021-05-10 06:58:43,425] Trial 872 pruned.
[I 2021-05-10 06:58:43,944] Trial 873 pruned.
[I 2021-05-10 06:58:44,091] Trial 874 pruned.
[I 2021-05-10 06:58:44,621] Trial 875 pruned.
[I 2021-05-10 06:58:44,706] Trial 876 pruned.
[I 2021-05-10 06:58:45,234] Trial 877 pruned.
[I 2021-05-10 06:58:45,760] Trial 878 pruned.
[I 2021-05-10 06:58:47,323] Trial 879 pruned.
[I 2021-05-10 06:58:47,856] Trial 880 pruned.
[I 2021-05-10 06:58:48,373] Trial 881 pruned.
[I 2021-05-10 06:58:48,901] Trial 882 pruned.
[I 2021-05-10 06:58:49,419] Trial 883 pruned.
[I 2021-05-10 06:58:49,942] Trial 884 pruned.
[I 2021-05-10 06:58:50,213] Trial 885 pruned.
[I 2021-05-10 06:58:50,740] Trial 886 pruned.
[I 2021-05-10 06:58:51,245] Trial 887 pruned.
[I 2021-05-10 06:58:51,776] Trial 888 pruned.
[I 2021-05-10 06:58:52,304] Trial 889 pruned.
[I 2021-05-10 06:58:52,831] Trial 890 pruned.
[I 2021-05-10 06:58:52,918] Trial 891 pruned.
[I 2021-05-10 06:58:53,440] Trial 892 pruned.
[I 2021-05-10 06:58:53,967] Trial 893 pruned.
[I 2021-05-10 06:58:54,492] Trial 894 pruned.
[I 2021-05-10 06:58:55,026] Trial 895 pruned.
[I 2021-05-10 06:58:55,554] Trial 896 pruned.
[I 2021-05-10 06:58:56,088] Trial 897 pruned.
[I 2021-05-10 06:58:56,619] Trial 898 pruned.
[I 2021-05-10 06:58:57,151] Trial 899 pruned.
[I 2021-05-10 06:58:57,679] Trial 900 pruned.
[I 2021-05-10 06:58:58,204] Trial 901 pruned.
[I 2021-05-10 06:58:58,733] Trial 902 pruned.
[I 2021-05-10 06:58:59,252] Trial 903 pruned.
[I 2021-05-10 06:58:59,338] Trial 904 pruned.
[I 2021-05-10 06:58:59,486] Trial 905 pruned.
[I 2021-05-10 06:59:00,018] Trial 906 pruned.
[I 2021-05-10 06:59:00,544] Trial 907 pruned.
[I 2021-05-10 06:59:01,065] Trial 908 pruned.
[I 2021-05-10 06:59:52,451] Trial 909 finished with value: 366.3580627441406 and parameters: {'lr': 0.0069468459188290095, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 06:59:52,980] Trial 910 pruned.
[I 2021-05-10 06:59:53,499] Trial 911 pruned.
[I 2021-05-10 06:59:54,028] Trial 912 pruned.
[I 2021-05-10 07:00:20,168] Trial 913 pruned.
[I 2021-05-10 07:00:20,688] Trial 914 pruned.
[I 2021-05-10 07:00:20,965] Trial 915 pruned.
[I 2021-05-10 07:00:21,498] Trial 916 pruned.
[I 2021-05-10 07:00:22,029] Trial 917 pruned.
[I 2021-05-10 07:00:22,558] Trial 918 pruned.
[I 2021-05-10 07:00:24,102] Trial 919 pruned.
[I 2021-05-10 07:00:24,189] Trial 920 pruned.
[I 2021-05-10 07:00:24,717] Trial 921 pruned.
[I 2021-05-10 07:00:25,247] Trial 922 pruned.
[I 2021-05-10 07:00:25,784] Trial 923 pruned.
[I 2021-05-10 07:00:26,312] Trial 924 pruned.
[I 2021-05-10 07:00:26,830] Trial 925 pruned.
[I 2021-05-10 07:00:27,348] Trial 926 pruned.
[I 2021-05-10 07:00:27,879] Trial 927 pruned.
[I 2021-05-10 07:00:28,411] Trial 928 pruned.
[I 2021-05-10 07:00:28,935] Trial 929 pruned.
[I 2021-05-10 07:00:29,464] Trial 930 pruned.
[I 2021-05-10 07:00:29,989] Trial 931 pruned.
[I 2021-05-10 07:00:31,547] Trial 932 pruned.
[I 2021-05-10 07:00:31,697] Trial 933 pruned.
[I 2021-05-10 07:00:32,227] Trial 934 pruned.
[I 2021-05-10 07:00:32,314] Trial 935 pruned.
[I 2021-05-10 07:00:32,845] Trial 936 pruned.
[I 2021-05-10 07:00:33,374] Trial 937 pruned.
[I 2021-05-10 07:00:33,906] Trial 938 pruned.
[I 2021-05-10 07:00:34,425] Trial 939 pruned.
[I 2021-05-10 07:00:34,958] Trial 940 pruned.
[I 2021-05-10 07:00:35,475] Trial 941 pruned.
[I 2021-05-10 07:00:36,004] Trial 942 pruned.
[I 2021-05-10 07:00:36,532] Trial 943 pruned.
[I 2021-05-10 07:00:37,061] Trial 944 pruned.
[I 2021-05-10 07:00:37,589] Trial 945 pruned.
[I 2021-05-10 07:00:37,866] Trial 946 pruned.
[I 2021-05-10 07:00:38,401] Trial 947 pruned.
[I 2021-05-10 07:00:38,928] Trial 948 pruned.
[I 2021-05-10 07:00:39,015] Trial 949 pruned.
[I 2021-05-10 07:00:39,545] Trial 950 pruned.
[I 2021-05-10 07:00:40,076] Trial 951 pruned.
[I 2021-05-10 07:00:40,586] Trial 952 pruned.
[I 2021-05-10 07:00:41,109] Trial 953 pruned.
[I 2021-05-10 07:00:41,627] Trial 954 pruned.
[I 2021-05-10 07:00:42,147] Trial 955 pruned.
[I 2021-05-10 07:00:42,671] Trial 956 pruned.
[I 2021-05-10 07:00:43,203] Trial 957 pruned.
[I 2021-05-10 07:00:43,737] Trial 958 pruned.
[I 2021-05-10 07:00:44,262] Trial 959 pruned.
[I 2021-05-10 07:00:44,791] Trial 960 pruned.
[I 2021-05-10 07:00:45,316] Trial 961 pruned.
[I 2021-05-10 07:00:45,848] Trial 962 pruned.
[I 2021-05-10 07:00:45,999] Trial 963 pruned.
[I 2021-05-10 07:00:46,086] Trial 964 pruned.
[I 2021-05-10 07:00:46,628] Trial 965 pruned.
[I 2021-05-10 07:00:47,143] Trial 966 pruned.
[I 2021-05-10 07:00:47,677] Trial 967 pruned.
[I 2021-05-10 07:00:48,205] Trial 968 pruned.
[I 2021-05-10 07:00:48,731] Trial 969 pruned.
[I 2021-05-10 07:01:40,144] Trial 970 finished with value: 535.1224365234375 and parameters: {'lr': 0.006503096588761899, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:01:40,662] Trial 971 pruned.
[I 2021-05-10 07:01:41,193] Trial 972 pruned.
[I 2021-05-10 07:01:41,713] Trial 973 pruned.
[I 2021-05-10 07:01:42,246] Trial 974 pruned.
[I 2021-05-10 07:01:42,522] Trial 975 pruned.
[I 2021-05-10 07:01:43,042] Trial 976 pruned.
[I 2021-05-10 07:01:43,574] Trial 977 pruned.
[I 2021-05-10 07:01:44,095] Trial 978 pruned.
[I 2021-05-10 07:01:44,182] Trial 979 pruned.
[I 2021-05-10 07:01:44,698] Trial 980 pruned.
[I 2021-05-10 07:01:45,226] Trial 981 pruned.
[I 2021-05-10 07:01:45,753] Trial 982 pruned.
[I 2021-05-10 07:01:46,275] Trial 983 pruned.
[I 2021-05-10 07:01:46,805] Trial 984 pruned.
[I 2021-05-10 07:01:47,341] Trial 985 pruned.
[I 2021-05-10 07:01:47,866] Trial 986 pruned.
[I 2021-05-10 07:01:48,398] Trial 987 pruned.
[I 2021-05-10 07:01:48,918] Trial 988 pruned.
[I 2021-05-10 07:01:49,450] Trial 989 pruned.
[I 2021-05-10 07:01:49,967] Trial 990 pruned.
[I 2021-05-10 07:01:50,118] Trial 991 pruned.
[I 2021-05-10 07:01:50,643] Trial 992 pruned.
[I 2021-05-10 07:01:50,730] Trial 993 pruned.
[I 2021-05-10 07:01:51,255] Trial 994 pruned.
[I 2021-05-10 07:01:51,775] Trial 995 pruned.
[I 2021-05-10 07:01:52,306] Trial 996 pruned.
[I 2021-05-10 07:01:52,827] Trial 997 pruned.
[I 2021-05-10 07:01:53,348] Trial 998 pruned.
[I 2021-05-10 07:01:53,878] Trial 999 pruned.
[I 2021-05-10 07:01:54,401] Trial 1000 pruned.
[I 2021-05-10 07:01:54,930] Trial 1001 pruned.
[I 2021-05-10 07:01:55,461] Trial 1002 pruned.
[I 2021-05-10 07:01:55,987] Trial 1003 pruned.
[I 2021-05-10 07:01:56,523] Trial 1004 pruned.
[I 2021-05-10 07:01:57,043] Trial 1005 pruned.
[I 2021-05-10 07:01:57,577] Trial 1006 pruned.
[I 2021-05-10 07:01:57,851] Trial 1007 pruned.
[I 2021-05-10 07:01:57,936] Trial 1008 pruned.
[I 2021-05-10 07:01:58,461] Trial 1009 pruned.
[I 2021-05-10 07:01:58,984] Trial 1010 pruned.
[I 2021-05-10 07:01:59,513] Trial 1011 pruned.
[I 2021-05-10 07:02:50,851] Trial 1012 finished with value: 376.5007019042969 and parameters: {'lr': 0.0036777757863349536, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:02:51,380] Trial 1013 pruned.
[I 2021-05-10 07:02:51,908] Trial 1014 pruned.
[I 2021-05-10 07:02:52,439] Trial 1015 pruned.
[I 2021-05-10 07:02:52,973] Trial 1016 pruned.
[I 2021-05-10 07:02:53,507] Trial 1017 pruned.
[I 2021-05-10 07:02:54,039] Trial 1018 pruned.
[I 2021-05-10 07:02:54,565] Trial 1019 pruned.
[I 2021-05-10 07:02:55,102] Trial 1020 pruned.
[I 2021-05-10 07:02:55,613] Trial 1021 pruned.
[I 2021-05-10 07:02:55,764] Trial 1022 pruned.
[I 2021-05-10 07:02:56,292] Trial 1023 pruned.
[I 2021-05-10 07:02:56,819] Trial 1024 pruned.
[I 2021-05-10 07:02:57,349] Trial 1025 pruned.
[I 2021-05-10 07:02:57,437] Trial 1026 pruned.
[I 2021-05-10 07:02:58,992] Trial 1027 pruned.
[I 2021-05-10 07:02:59,515] Trial 1028 pruned.
[I 2021-05-10 07:03:00,047] Trial 1029 pruned.
[I 2021-05-10 07:03:00,577] Trial 1030 pruned.
[I 2021-05-10 07:03:01,097] Trial 1031 pruned.
[I 2021-05-10 07:03:01,627] Trial 1032 pruned.
[I 2021-05-10 07:03:02,153] Trial 1033 pruned.
[I 2021-05-10 07:03:02,436] Trial 1034 pruned.
[I 2021-05-10 07:03:02,956] Trial 1035 pruned.
[I 2021-05-10 07:03:03,467] Trial 1036 pruned.
[I 2021-05-10 07:03:54,707] Trial 1037 finished with value: 401.2752380371094 and parameters: {'lr': 0.0035900268661881738, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:03:54,794] Trial 1038 pruned.
[I 2021-05-10 07:03:55,324] Trial 1039 pruned.
[I 2021-05-10 07:03:55,854] Trial 1040 pruned.
[I 2021-05-10 07:03:56,380] Trial 1041 pruned.
[I 2021-05-10 07:03:56,914] Trial 1042 pruned.
[I 2021-05-10 07:03:57,435] Trial 1043 pruned.
[I 2021-05-10 07:03:57,967] Trial 1044 pruned.
[I 2021-05-10 07:03:58,500] Trial 1045 pruned.
[I 2021-05-10 07:03:59,021] Trial 1046 pruned.
[I 2021-05-10 07:04:50,523] Trial 1047 finished with value: 403.2982482910156 and parameters: {'lr': 0.004759002432892931, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:04:51,034] Trial 1048 pruned.
[I 2021-05-10 07:04:51,567] Trial 1049 pruned.
[I 2021-05-10 07:04:52,092] Trial 1050 pruned.
[I 2021-05-10 07:04:52,247] Trial 1051 pruned.
[I 2021-05-10 07:04:52,787] Trial 1052 pruned.
[I 2021-05-10 07:04:53,314] Trial 1053 pruned.
[I 2021-05-10 07:04:53,403] Trial 1054 pruned.
[I 2021-05-10 07:04:53,933] Trial 1055 pruned.
[I 2021-05-10 07:05:38,997] Trial 1056 pruned.
[I 2021-05-10 07:05:39,529] Trial 1057 pruned.
[I 2021-05-10 07:05:40,049] Trial 1058 pruned.
[I 2021-05-10 07:05:40,580] Trial 1059 pruned.
[I 2021-05-10 07:05:41,103] Trial 1060 pruned.
[I 2021-05-10 07:05:41,632] Trial 1061 pruned.
[I 2021-05-10 07:05:42,155] Trial 1062 pruned.
[I 2021-05-10 07:05:42,677] Trial 1063 pruned.
[I 2021-05-10 07:05:42,956] Trial 1064 pruned.
[I 2021-05-10 07:05:43,456] Trial 1065 pruned.
[I 2021-05-10 07:05:43,991] Trial 1066 pruned.
[I 2021-05-10 07:05:44,081] Trial 1067 pruned.
[I 2021-05-10 07:05:44,602] Trial 1068 pruned.
[I 2021-05-10 07:05:45,136] Trial 1069 pruned.
[I 2021-05-10 07:05:45,663] Trial 1070 pruned.
[I 2021-05-10 07:05:47,221] Trial 1071 pruned.
[I 2021-05-10 07:05:47,754] Trial 1072 pruned.
[I 2021-05-10 07:05:48,276] Trial 1073 pruned.
[I 2021-05-10 07:05:48,809] Trial 1074 pruned.
[I 2021-05-10 07:05:49,323] Trial 1075 pruned.
[I 2021-05-10 07:05:49,855] Trial 1076 pruned.
[I 2021-05-10 07:05:50,378] Trial 1077 pruned.
[I 2021-05-10 07:05:51,935] Trial 1078 pruned.
[I 2021-05-10 07:05:52,460] Trial 1079 pruned.
[I 2021-05-10 07:05:52,987] Trial 1080 pruned.
[I 2021-05-10 07:05:53,076] Trial 1081 pruned.
[I 2021-05-10 07:05:53,226] Trial 1082 pruned.
[I 2021-05-10 07:05:53,761] Trial 1083 pruned.
[I 2021-05-10 07:05:54,294] Trial 1084 pruned.
[I 2021-05-10 07:05:54,820] Trial 1085 pruned.
[I 2021-05-10 07:05:55,355] Trial 1086 pruned.
[I 2021-05-10 07:05:55,874] Trial 1087 pruned.
[I 2021-05-10 07:05:56,405] Trial 1088 pruned.
[I 2021-05-10 07:05:56,932] Trial 1089 pruned.
[I 2021-05-10 07:05:57,461] Trial 1090 pruned.
[I 2021-05-10 07:05:57,987] Trial 1091 pruned.
[I 2021-05-10 07:05:58,498] Trial 1092 pruned.
[I 2021-05-10 07:05:59,034] Trial 1093 pruned.
[I 2021-05-10 07:05:59,313] Trial 1094 pruned.
[I 2021-05-10 07:05:59,840] Trial 1095 pruned.
[I 2021-05-10 07:06:00,385] Trial 1096 pruned.
[I 2021-05-10 07:06:01,921] Trial 1097 pruned.
[I 2021-05-10 07:06:02,011] Trial 1098 pruned.
[I 2021-05-10 07:06:53,185] Trial 1099 finished with value: 379.1881408691406 and parameters: {'lr': 0.007449639166343035, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:06:53,711] Trial 1100 pruned.
[I 2021-05-10 07:06:54,249] Trial 1101 pruned.
[I 2021-05-10 07:06:54,780] Trial 1102 pruned.
[I 2021-05-10 07:06:55,298] Trial 1103 pruned.
[I 2021-05-10 07:06:55,828] Trial 1104 pruned.
[I 2021-05-10 07:06:56,349] Trial 1105 pruned.
[I 2021-05-10 07:06:57,894] Trial 1106 pruned.
[I 2021-05-10 07:06:58,431] Trial 1107 pruned.
[I 2021-05-10 07:06:58,955] Trial 1108 pruned.
[I 2021-05-10 07:06:59,492] Trial 1109 pruned.
[I 2021-05-10 07:06:59,644] Trial 1110 pruned.
[I 2021-05-10 07:07:00,180] Trial 1111 pruned.
[I 2021-05-10 07:07:00,269] Trial 1112 pruned.
[I 2021-05-10 07:07:00,796] Trial 1113 pruned.
[I 2021-05-10 07:07:01,330] Trial 1114 pruned.
[I 2021-05-10 07:07:01,852] Trial 1115 pruned.
[I 2021-05-10 07:07:02,386] Trial 1116 pruned.
[I 2021-05-10 07:07:02,913] Trial 1117 pruned.
[I 2021-05-10 07:07:03,438] Trial 1118 pruned.
[I 2021-05-10 07:07:03,968] Trial 1119 pruned.
[I 2021-05-10 07:07:04,491] Trial 1120 pruned.
[I 2021-05-10 07:07:05,025] Trial 1121 pruned.
[I 2021-05-10 07:07:05,544] Trial 1122 pruned.
[I 2021-05-10 07:07:05,823] Trial 1123 pruned.
[I 2021-05-10 07:07:06,362] Trial 1124 pruned.
[I 2021-05-10 07:07:06,886] Trial 1125 pruned.
[I 2021-05-10 07:07:06,978] Trial 1126 pruned.
[I 2021-05-10 07:07:07,506] Trial 1127 pruned.
[I 2021-05-10 07:07:08,039] Trial 1128 pruned.
[I 2021-05-10 07:07:08,571] Trial 1129 pruned.
[I 2021-05-10 07:07:09,098] Trial 1130 pruned.
[I 2021-05-10 07:07:09,621] Trial 1131 pruned.
[I 2021-05-10 07:07:10,144] Trial 1132 pruned.
[I 2021-05-10 07:07:10,678] Trial 1133 pruned.
[I 2021-05-10 07:07:11,213] Trial 1134 pruned.
[I 2021-05-10 07:07:11,724] Trial 1135 pruned.
[I 2021-05-10 07:07:12,258] Trial 1136 pruned.
[I 2021-05-10 07:07:12,782] Trial 1137 pruned.
[I 2021-05-10 07:07:13,318] Trial 1138 pruned.
[I 2021-05-10 07:08:03,023] Trial 1139 pruned.
[I 2021-05-10 07:08:03,174] Trial 1140 pruned.
[I 2021-05-10 07:08:03,266] Trial 1141 pruned.
[I 2021-05-10 07:08:03,794] Trial 1142 pruned.
[I 2021-05-10 07:08:04,332] Trial 1143 pruned.
[I 2021-05-10 07:08:04,853] Trial 1144 pruned.
[I 2021-05-10 07:08:05,376] Trial 1145 pruned.
[I 2021-05-10 07:08:05,897] Trial 1146 pruned.
[I 2021-05-10 07:08:06,416] Trial 1147 pruned.
[I 2021-05-10 07:08:06,947] Trial 1148 pruned.
[I 2021-05-10 07:08:07,469] Trial 1149 pruned.
[I 2021-05-10 07:08:08,005] Trial 1150 pruned.
[I 2021-05-10 07:08:08,551] Trial 1151 pruned.
[I 2021-05-10 07:08:08,838] Trial 1152 pruned.
[I 2021-05-10 07:08:09,368] Trial 1153 pruned.
[I 2021-05-10 07:08:09,890] Trial 1154 pruned.
[I 2021-05-10 07:08:10,428] Trial 1155 pruned.
[I 2021-05-10 07:08:10,518] Trial 1156 pruned.
[I 2021-05-10 07:08:11,048] Trial 1157 pruned.
[I 2021-05-10 07:08:11,569] Trial 1158 pruned.
[I 2021-05-10 07:08:12,099] Trial 1159 pruned.
[I 2021-05-10 07:08:13,648] Trial 1160 pruned.
[I 2021-05-10 07:08:14,179] Trial 1161 pruned.
[I 2021-05-10 07:08:14,696] Trial 1162 pruned.
[I 2021-05-10 07:08:15,229] Trial 1163 pruned.
[I 2021-05-10 07:08:15,756] Trial 1164 pruned.
[I 2021-05-10 07:08:16,292] Trial 1165 pruned.
[I 2021-05-10 07:08:16,828] Trial 1166 pruned.
[I 2021-05-10 07:08:17,352] Trial 1167 pruned.
[I 2021-05-10 07:08:17,885] Trial 1168 pruned.
[I 2021-05-10 07:08:17,973] Trial 1169 pruned.
[I 2021-05-10 07:08:18,507] Trial 1170 pruned.
[I 2021-05-10 07:08:18,656] Trial 1171 pruned.
[I 2021-05-10 07:08:19,180] Trial 1172 pruned.
[I 2021-05-10 07:08:19,712] Trial 1173 pruned.
[I 2021-05-10 07:08:20,226] Trial 1174 pruned.
[I 2021-05-10 07:08:20,748] Trial 1175 pruned.
[I 2021-05-10 07:08:21,274] Trial 1176 pruned.
[I 2021-05-10 07:08:21,809] Trial 1177 pruned.
[I 2021-05-10 07:08:22,346] Trial 1178 pruned.
[I 2021-05-10 07:08:22,874] Trial 1179 pruned.
[I 2021-05-10 07:08:23,408] Trial 1180 pruned.
[I 2021-05-10 07:08:23,684] Trial 1181 pruned.
[I 2021-05-10 07:09:15,037] Trial 1182 finished with value: 373.17547607421875 and parameters: {'lr': 0.004822031720626179, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:09:15,569] Trial 1183 pruned.
[I 2021-05-10 07:09:16,105] Trial 1184 pruned.
[I 2021-05-10 07:09:16,638] Trial 1185 pruned.
[I 2021-05-10 07:09:16,728] Trial 1186 pruned.
[I 2021-05-10 07:09:17,264] Trial 1187 pruned.
[I 2021-05-10 07:09:17,800] Trial 1188 pruned.
[I 2021-05-10 07:09:18,335] Trial 1189 pruned.
[I 2021-05-10 07:09:18,870] Trial 1190 pruned.
[I 2021-05-10 07:09:19,402] Trial 1191 pruned.
[I 2021-05-10 07:09:19,935] Trial 1192 pruned.
[I 2021-05-10 07:09:20,467] Trial 1193 pruned.
[I 2021-05-10 07:09:21,000] Trial 1194 pruned.
[I 2021-05-10 07:09:21,530] Trial 1195 pruned.
[I 2021-05-10 07:09:22,063] Trial 1196 pruned.
[I 2021-05-10 07:09:22,596] Trial 1197 pruned.
[I 2021-05-10 07:09:23,133] Trial 1198 pruned.
[I 2021-05-10 07:09:23,286] Trial 1199 pruned.
[I 2021-05-10 07:09:23,823] Trial 1200 pruned.
[I 2021-05-10 07:09:23,915] Trial 1201 pruned.
[I 2021-05-10 07:09:24,448] Trial 1202 pruned.
[I 2021-05-10 07:09:24,963] Trial 1203 pruned.
[I 2021-05-10 07:09:25,496] Trial 1204 pruned.
[I 2021-05-10 07:10:11,201] Trial 1205 pruned.
[I 2021-05-10 07:11:02,455] Trial 1206 finished with value: 387.4253845214844 and parameters: {'lr': 0.004931047370367434, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:11:02,986] Trial 1207 pruned.
[I 2021-05-10 07:11:03,517] Trial 1208 pruned.
[I 2021-05-10 07:11:04,048] Trial 1209 pruned.
[I 2021-05-10 07:11:04,579] Trial 1210 pruned.
[I 2021-05-10 07:11:05,113] Trial 1211 pruned.
[I 2021-05-10 07:11:05,394] Trial 1212 pruned.
[I 2021-05-10 07:11:05,916] Trial 1213 pruned.
[I 2021-05-10 07:11:06,455] Trial 1214 pruned.
[I 2021-05-10 07:11:06,991] Trial 1215 pruned.
[I 2021-05-10 07:11:07,082] Trial 1216 pruned.
[I 2021-05-10 07:11:07,619] Trial 1217 pruned.
[I 2021-05-10 07:11:08,153] Trial 1218 pruned.
[I 2021-05-10 07:11:08,685] Trial 1219 pruned.
[I 2021-05-10 07:11:09,222] Trial 1220 pruned.
[I 2021-05-10 07:11:09,757] Trial 1221 pruned.
[I 2021-05-10 07:11:10,293] Trial 1222 pruned.
[I 2021-05-10 07:11:10,830] Trial 1223 pruned.
[I 2021-05-10 07:11:11,365] Trial 1224 pruned.
[I 2021-05-10 07:11:56,417] Trial 1225 pruned.
[I 2021-05-10 07:11:56,952] Trial 1226 pruned.
[I 2021-05-10 07:11:57,478] Trial 1227 pruned.
[I 2021-05-10 07:11:57,631] Trial 1228 pruned.
[I 2021-05-10 07:11:58,159] Trial 1229 pruned.
[I 2021-05-10 07:11:58,687] Trial 1230 pruned.
[I 2021-05-10 07:11:59,225] Trial 1231 pruned.
[I 2021-05-10 07:11:59,762] Trial 1232 pruned.
[I 2021-05-10 07:11:59,855] Trial 1233 pruned.
[I 2021-05-10 07:12:00,392] Trial 1234 pruned.
[I 2021-05-10 07:12:00,927] Trial 1235 pruned.
[I 2021-05-10 07:12:01,463] Trial 1236 pruned.
[I 2021-05-10 07:12:01,995] Trial 1237 pruned.
[I 2021-05-10 07:12:02,528] Trial 1238 pruned.
[I 2021-05-10 07:12:04,089] Trial 1239 pruned.
[I 2021-05-10 07:12:04,611] Trial 1240 pruned.
[I 2021-05-10 07:12:04,893] Trial 1241 pruned.
[I 2021-05-10 07:12:05,417] Trial 1242 pruned.
[I 2021-05-10 07:12:05,941] Trial 1243 pruned.
[I 2021-05-10 07:12:06,032] Trial 1244 pruned.
[I 2021-05-10 07:12:07,557] Trial 1245 pruned.
[I 2021-05-10 07:12:08,094] Trial 1246 pruned.
[I 2021-05-10 07:12:08,624] Trial 1247 pruned.
[I 2021-05-10 07:12:09,162] Trial 1248 pruned.
[I 2021-05-10 07:12:09,697] Trial 1249 pruned.
[I 2021-05-10 07:12:10,219] Trial 1250 pruned.
[I 2021-05-10 07:12:10,758] Trial 1251 pruned.
[I 2021-05-10 07:12:11,294] Trial 1252 pruned.
[I 2021-05-10 07:12:11,821] Trial 1253 pruned.
[I 2021-05-10 07:12:12,356] Trial 1254 pruned.
[I 2021-05-10 07:12:12,881] Trial 1255 pruned.
[I 2021-05-10 07:12:13,420] Trial 1256 pruned.
[I 2021-05-10 07:12:13,573] Trial 1257 pruned.
[I 2021-05-10 07:12:14,091] Trial 1258 pruned.
[I 2021-05-10 07:12:14,184] Trial 1259 pruned.
[I 2021-05-10 07:12:14,721] Trial 1260 pruned.
[I 2021-05-10 07:12:15,246] Trial 1261 pruned.
[I 2021-05-10 07:12:15,780] Trial 1262 pruned.
[I 2021-05-10 07:12:16,304] Trial 1263 pruned.
[I 2021-05-10 07:12:16,839] Trial 1264 pruned.
[I 2021-05-10 07:12:17,373] Trial 1265 pruned.
[I 2021-05-10 07:12:17,903] Trial 1266 pruned.
[I 2021-05-10 07:12:18,433] Trial 1267 pruned.
[I 2021-05-10 07:12:18,967] Trial 1268 pruned.
[I 2021-05-10 07:12:19,492] Trial 1269 pruned.
[I 2021-05-10 07:12:19,767] Trial 1270 pruned.
[I 2021-05-10 07:12:20,294] Trial 1271 pruned.
[I 2021-05-10 07:12:20,827] Trial 1272 pruned.
[I 2021-05-10 07:12:20,920] Trial 1273 pruned.
[I 2021-05-10 07:12:21,451] Trial 1274 pruned.
[I 2021-05-10 07:12:21,989] Trial 1275 pruned.
[I 2021-05-10 07:12:22,529] Trial 1276 pruned.
[I 2021-05-10 07:12:23,055] Trial 1277 pruned.
[I 2021-05-10 07:12:23,593] Trial 1278 pruned.
[I 2021-05-10 07:12:24,127] Trial 1279 pruned.
[I 2021-05-10 07:12:25,655] Trial 1280 pruned.
[I 2021-05-10 07:12:26,188] Trial 1281 pruned.
[I 2021-05-10 07:12:26,708] Trial 1282 pruned.
[I 2021-05-10 07:12:27,246] Trial 1283 pruned.
[I 2021-05-10 07:12:27,769] Trial 1284 pruned.
[I 2021-05-10 07:12:28,286] Trial 1285 pruned.
[I 2021-05-10 07:12:28,825] Trial 1286 pruned.
[I 2021-05-10 07:12:28,918] Trial 1287 pruned.
[I 2021-05-10 07:12:29,444] Trial 1288 pruned.
[I 2021-05-10 07:12:29,981] Trial 1289 pruned.
[I 2021-05-10 07:12:30,135] Trial 1290 pruned.
[I 2021-05-10 07:12:30,668] Trial 1291 pruned.
[I 2021-05-10 07:12:31,208] Trial 1292 pruned.
[I 2021-05-10 07:12:31,735] Trial 1293 pruned.
[I 2021-05-10 07:12:32,274] Trial 1294 pruned.
[I 2021-05-10 07:12:32,808] Trial 1295 pruned.
[I 2021-05-10 07:12:33,328] Trial 1296 pruned.
[I 2021-05-10 07:12:33,864] Trial 1297 pruned.
[I 2021-05-10 07:12:34,381] Trial 1298 pruned.
[I 2021-05-10 07:12:34,914] Trial 1299 pruned.
[I 2021-05-10 07:12:35,448] Trial 1300 pruned.
[I 2021-05-10 07:12:35,728] Trial 1301 pruned.
[I 2021-05-10 07:12:36,263] Trial 1302 pruned.
[I 2021-05-10 07:12:36,355] Trial 1303 pruned.
[I 2021-05-10 07:12:36,880] Trial 1304 pruned.
[I 2021-05-10 07:12:37,417] Trial 1305 pruned.
[I 2021-05-10 07:12:37,956] Trial 1306 pruned.
[I 2021-05-10 07:12:38,490] Trial 1307 pruned.
[I 2021-05-10 07:12:39,014] Trial 1308 pruned.
[I 2021-05-10 07:12:39,540] Trial 1309 pruned.
[I 2021-05-10 07:12:40,081] Trial 1310 pruned.
[I 2021-05-10 07:12:40,604] Trial 1311 pruned.
[I 2021-05-10 07:12:41,118] Trial 1312 pruned.
[I 2021-05-10 07:12:41,651] Trial 1313 pruned.
[I 2021-05-10 07:12:42,191] Trial 1314 pruned.
[I 2021-05-10 07:12:42,726] Trial 1315 pruned.
[I 2021-05-10 07:12:42,818] Trial 1316 pruned.
[I 2021-05-10 07:12:42,973] Trial 1317 pruned.
[I 2021-05-10 07:12:43,511] Trial 1318 pruned.
[I 2021-05-10 07:12:44,048] Trial 1319 pruned.
[I 2021-05-10 07:12:44,573] Trial 1320 pruned.
[I 2021-05-10 07:12:45,097] Trial 1321 pruned.
[I 2021-05-10 07:12:45,630] Trial 1322 pruned.
[I 2021-05-10 07:12:46,164] Trial 1323 pruned.
[I 2021-05-10 07:12:46,713] Trial 1324 pruned.
[I 2021-05-10 07:12:47,230] Trial 1325 pruned.
[I 2021-05-10 07:12:47,770] Trial 1326 pruned.
[I 2021-05-10 07:12:48,313] Trial 1327 pruned.
[I 2021-05-10 07:12:48,844] Trial 1328 pruned.
[I 2021-05-10 07:12:50,405] Trial 1329 pruned.
[I 2021-05-10 07:12:50,688] Trial 1330 pruned.
[I 2021-05-10 07:12:51,213] Trial 1331 pruned.
[I 2021-05-10 07:12:51,306] Trial 1332 pruned.
[I 2021-05-10 07:12:51,840] Trial 1333 pruned.
[I 2021-05-10 07:12:52,375] Trial 1334 pruned.
[I 2021-05-10 07:12:52,908] Trial 1335 pruned.
[I 2021-05-10 07:12:53,442] Trial 1336 pruned.
[I 2021-05-10 07:12:53,982] Trial 1337 pruned.
[I 2021-05-10 07:12:54,508] Trial 1338 pruned.
[I 2021-05-10 07:12:55,037] Trial 1339 pruned.
[I 2021-05-10 07:12:55,575] Trial 1340 pruned.
[I 2021-05-10 07:12:56,116] Trial 1341 pruned.
[I 2021-05-10 07:12:56,629] Trial 1342 pruned.
[I 2021-05-10 07:12:57,167] Trial 1343 pruned.
[I 2021-05-10 07:12:57,699] Trial 1344 pruned.
[I 2021-05-10 07:12:57,792] Trial 1345 pruned.
[I 2021-05-10 07:12:57,948] Trial 1346 pruned.
[I 2021-05-10 07:12:58,482] Trial 1347 pruned.
[I 2021-05-10 07:12:59,017] Trial 1348 pruned.
[I 2021-05-10 07:12:59,543] Trial 1349 pruned.
[I 2021-05-10 07:13:00,075] Trial 1350 pruned.
[I 2021-05-10 07:13:00,611] Trial 1351 pruned.
[I 2021-05-10 07:13:01,124] Trial 1352 pruned.
[I 2021-05-10 07:13:01,659] Trial 1353 pruned.
[I 2021-05-10 07:13:02,194] Trial 1354 pruned.
[I 2021-05-10 07:13:02,717] Trial 1355 pruned.
[I 2021-05-10 07:13:03,257] Trial 1356 pruned.
[I 2021-05-10 07:13:03,796] Trial 1357 pruned.
[I 2021-05-10 07:13:04,323] Trial 1358 pruned.
[I 2021-05-10 07:13:04,607] Trial 1359 pruned.
[I 2021-05-10 07:13:05,146] Trial 1360 pruned.
[I 2021-05-10 07:13:05,238] Trial 1361 pruned.
[I 2021-05-10 07:13:05,776] Trial 1362 pruned.
[I 2021-05-10 07:13:06,295] Trial 1363 pruned.
[I 2021-05-10 07:13:06,830] Trial 1364 pruned.
[I 2021-05-10 07:13:07,357] Trial 1365 pruned.
[I 2021-05-10 07:13:07,871] Trial 1366 pruned.
[I 2021-05-10 07:13:08,411] Trial 1367 pruned.
[I 2021-05-10 07:13:08,949] Trial 1368 pruned.
[I 2021-05-10 07:13:09,475] Trial 1369 pruned.
[I 2021-05-10 07:13:10,015] Trial 1370 pruned.
[I 2021-05-10 07:13:10,546] Trial 1371 pruned.
[I 2021-05-10 07:13:11,084] Trial 1372 pruned.
[I 2021-05-10 07:13:11,618] Trial 1373 pruned.
[I 2021-05-10 07:13:12,147] Trial 1374 pruned.
[I 2021-05-10 07:13:12,305] Trial 1375 pruned.
[I 2021-05-10 07:13:12,398] Trial 1376 pruned.
[I 2021-05-10 07:13:12,932] Trial 1377 pruned.
[I 2021-05-10 07:13:13,472] Trial 1378 pruned.
[I 2021-05-10 07:13:13,994] Trial 1379 pruned.
[I 2021-05-10 07:13:14,533] Trial 1380 pruned.
[I 2021-05-10 07:13:15,073] Trial 1381 pruned.
[I 2021-05-10 07:13:15,601] Trial 1382 pruned.
[I 2021-05-10 07:13:16,141] Trial 1383 pruned.
[I 2021-05-10 07:13:16,678] Trial 1384 pruned.
[I 2021-05-10 07:13:17,204] Trial 1385 pruned.
[I 2021-05-10 07:13:17,742] Trial 1386 pruned.
[I 2021-05-10 07:13:18,283] Trial 1387 pruned.
[I 2021-05-10 07:13:18,565] Trial 1388 pruned.
[I 2021-05-10 07:13:19,100] Trial 1389 pruned.
[I 2021-05-10 07:13:19,194] Trial 1390 pruned.
[I 2021-05-10 07:13:19,729] Trial 1391 pruned.
[I 2021-05-10 07:13:20,260] Trial 1392 pruned.
[I 2021-05-10 07:13:20,787] Trial 1393 pruned.
[I 2021-05-10 07:13:21,316] Trial 1394 pruned.
[I 2021-05-10 07:13:21,855] Trial 1395 pruned.
[I 2021-05-10 07:13:22,394] Trial 1396 pruned.
[I 2021-05-10 07:13:22,928] Trial 1397 pruned.
[I 2021-05-10 07:13:23,456] Trial 1398 pruned.
[I 2021-05-10 07:13:23,995] Trial 1399 pruned.
[I 2021-05-10 07:13:25,549] Trial 1400 pruned.
[I 2021-05-10 07:13:26,076] Trial 1401 pruned.
[I 2021-05-10 07:13:26,618] Trial 1402 pruned.
[I 2021-05-10 07:13:26,711] Trial 1403 pruned.
[I 2021-05-10 07:13:27,242] Trial 1404 pruned.
[I 2021-05-10 07:13:27,397] Trial 1405 pruned.
[I 2021-05-10 07:13:27,912] Trial 1406 pruned.
[I 2021-05-10 07:13:28,450] Trial 1407 pruned.
[I 2021-05-10 07:13:28,990] Trial 1408 pruned.
[I 2021-05-10 07:13:29,518] Trial 1409 pruned.
[I 2021-05-10 07:13:30,052] Trial 1410 pruned.
[I 2021-05-10 07:13:30,592] Trial 1411 pruned.
[I 2021-05-10 07:13:31,124] Trial 1412 pruned.
[I 2021-05-10 07:13:31,667] Trial 1413 pruned.
[I 2021-05-10 07:13:32,209] Trial 1414 pruned.
[I 2021-05-10 07:13:32,735] Trial 1415 pruned.
[I 2021-05-10 07:13:33,276] Trial 1416 pruned.
[I 2021-05-10 07:13:33,558] Trial 1417 pruned.
[I 2021-05-10 07:13:34,096] Trial 1418 pruned.
[I 2021-05-10 07:13:34,190] Trial 1419 pruned.
[I 2021-05-10 07:13:34,704] Trial 1420 pruned.
[I 2021-05-10 07:13:35,234] Trial 1421 pruned.
[I 2021-05-10 07:13:35,776] Trial 1422 pruned.
[I 2021-05-10 07:13:37,313] Trial 1423 pruned.
[I 2021-05-10 07:13:37,850] Trial 1424 pruned.
[I 2021-05-10 07:13:38,385] Trial 1425 pruned.
[I 2021-05-10 07:13:38,925] Trial 1426 pruned.
[I 2021-05-10 07:13:39,470] Trial 1427 pruned.
[I 2021-05-10 07:13:40,000] Trial 1428 pruned.
[I 2021-05-10 07:13:40,539] Trial 1429 pruned.
[I 2021-05-10 07:13:41,069] Trial 1430 pruned.
[I 2021-05-10 07:13:41,595] Trial 1431 pruned.
[I 2021-05-10 07:13:42,131] Trial 1432 pruned.
[I 2021-05-10 07:13:42,223] Trial 1433 pruned.
[I 2021-05-10 07:13:42,380] Trial 1434 pruned.
[I 2021-05-10 07:13:42,919] Trial 1435 pruned.
[I 2021-05-10 07:13:43,451] Trial 1436 pruned.
[I 2021-05-10 07:13:43,992] Trial 1437 pruned.
[I 2021-05-10 07:13:44,533] Trial 1438 pruned.
[I 2021-05-10 07:13:45,070] Trial 1439 pruned.
[I 2021-05-10 07:13:45,608] Trial 1440 pruned.
[I 2021-05-10 07:13:46,149] Trial 1441 pruned.
[I 2021-05-10 07:13:46,675] Trial 1442 pruned.
[I 2021-05-10 07:13:47,212] Trial 1443 pruned.
[I 2021-05-10 07:13:47,730] Trial 1444 pruned.
[I 2021-05-10 07:13:48,268] Trial 1445 pruned.
[I 2021-05-10 07:13:48,558] Trial 1446 pruned.
[I 2021-05-10 07:13:49,075] Trial 1447 pruned.
[I 2021-05-10 07:13:49,170] Trial 1448 pruned.
[I 2021-05-10 07:13:50,733] Trial 1449 pruned.
[I 2021-05-10 07:13:52,249] Trial 1450 pruned.
[I 2021-05-10 07:13:52,805] Trial 1451 pruned.
[I 2021-05-10 07:13:53,333] Trial 1452 pruned.
[I 2021-05-10 07:13:53,875] Trial 1453 pruned.
[I 2021-05-10 07:13:54,416] Trial 1454 pruned.
[I 2021-05-10 07:13:54,950] Trial 1455 pruned.
[I 2021-05-10 07:13:55,487] Trial 1456 pruned.
[I 2021-05-10 07:13:56,015] Trial 1457 pruned.
[I 2021-05-10 07:13:56,542] Trial 1458 pruned.
[I 2021-05-10 07:13:57,082] Trial 1459 pruned.
[I 2021-05-10 07:13:57,599] Trial 1460 pruned.
[I 2021-05-10 07:13:58,135] Trial 1461 pruned.
[I 2021-05-10 07:13:58,232] Trial 1462 pruned.
[I 2021-05-10 07:13:58,389] Trial 1463 pruned.
[I 2021-05-10 07:13:58,930] Trial 1464 pruned.
[I 2021-05-10 07:13:59,468] Trial 1465 pruned.
[I 2021-05-10 07:13:59,999] Trial 1466 pruned.
[I 2021-05-10 07:14:00,539] Trial 1467 pruned.
[I 2021-05-10 07:14:01,080] Trial 1468 pruned.
[I 2021-05-10 07:14:01,607] Trial 1469 pruned.
[I 2021-05-10 07:14:02,144] Trial 1470 pruned.
[I 2021-05-10 07:14:02,671] Trial 1471 pruned.
[I 2021-05-10 07:14:03,202] Trial 1472 pruned.
[I 2021-05-10 07:14:03,742] Trial 1473 pruned.
[I 2021-05-10 07:14:04,265] Trial 1474 pruned.
[I 2021-05-10 07:14:04,550] Trial 1475 pruned.
[I 2021-05-10 07:14:05,083] Trial 1476 pruned.
[I 2021-05-10 07:14:05,179] Trial 1477 pruned.
[I 2021-05-10 07:14:05,718] Trial 1478 pruned.
[I 2021-05-10 07:14:06,248] Trial 1479 pruned.
[I 2021-05-10 07:14:06,792] Trial 1480 pruned.
[I 2021-05-10 07:14:07,333] Trial 1481 pruned.
[I 2021-05-10 07:14:07,867] Trial 1482 pruned.
[I 2021-05-10 07:14:08,407] Trial 1483 pruned.
[I 2021-05-10 07:14:08,941] Trial 1484 pruned.
[I 2021-05-10 07:14:09,469] Trial 1485 pruned.
[I 2021-05-10 07:14:10,008] Trial 1486 pruned.
[I 2021-05-10 07:14:10,531] Trial 1487 pruned.
[I 2021-05-10 07:14:11,072] Trial 1488 pruned.
[I 2021-05-10 07:14:11,612] Trial 1489 pruned.
[I 2021-05-10 07:14:12,146] Trial 1490 pruned.
[I 2021-05-10 07:14:12,240] Trial 1491 pruned.
[I 2021-05-10 07:14:12,777] Trial 1492 pruned.
[I 2021-05-10 07:14:12,933] Trial 1493 pruned.
[I 2021-05-10 07:14:13,471] Trial 1494 pruned.
[I 2021-05-10 07:14:14,015] Trial 1495 pruned.
[I 2021-05-10 07:14:14,547] Trial 1496 pruned.
[I 2021-05-10 07:14:15,084] Trial 1497 pruned.
[I 2021-05-10 07:14:15,608] Trial 1498 pruned.
[I 2021-05-10 07:14:16,150] Trial 1499 pruned.
[I 2021-05-10 07:14:16,688] Trial 1500 pruned.
[I 2021-05-10 07:14:17,215] Trial 1501 pruned.
[I 2021-05-10 07:14:17,738] Trial 1502 pruned.
[I 2021-05-10 07:14:18,288] Trial 1503 pruned.
[I 2021-05-10 07:14:18,827] Trial 1504 pruned.
[I 2021-05-10 07:14:19,115] Trial 1505 pruned.
[I 2021-05-10 07:14:19,210] Trial 1506 pruned.
[I 2021-05-10 07:14:19,751] Trial 1507 pruned.
[I 2021-05-10 07:14:20,297] Trial 1508 pruned.
[I 2021-05-10 07:14:20,828] Trial 1509 pruned.
[I 2021-05-10 07:14:21,369] Trial 1510 pruned.
[I 2021-05-10 07:14:21,898] Trial 1511 pruned.
[I 2021-05-10 07:14:22,432] Trial 1512 pruned.
[I 2021-05-10 07:14:22,974] Trial 1513 pruned.
[I 2021-05-10 07:14:23,494] Trial 1514 pruned.
[I 2021-05-10 07:14:24,036] Trial 1515 pruned.
[I 2021-05-10 07:14:24,572] Trial 1516 pruned.
[I 2021-05-10 07:14:25,103] Trial 1517 pruned.
[I 2021-05-10 07:14:25,640] Trial 1518 pruned.
[I 2021-05-10 07:14:26,182] Trial 1519 pruned.
[I 2021-05-10 07:14:26,717] Trial 1520 pruned.
[I 2021-05-10 07:14:27,259] Trial 1521 pruned.
[I 2021-05-10 07:14:27,356] Trial 1522 pruned.
[I 2021-05-10 07:14:27,888] Trial 1523 pruned.
[I 2021-05-10 07:14:28,428] Trial 1524 pruned.
[I 2021-05-10 07:14:28,584] Trial 1525 pruned.
[I 2021-05-10 07:14:29,128] Trial 1526 pruned.
[I 2021-05-10 07:14:29,664] Trial 1527 pruned.
[I 2021-05-10 07:14:30,183] Trial 1528 pruned.
[I 2021-05-10 07:14:30,710] Trial 1529 pruned.
[I 2021-05-10 07:14:31,253] Trial 1530 pruned.
[I 2021-05-10 07:14:31,788] Trial 1531 pruned.
[I 2021-05-10 07:14:32,330] Trial 1532 pruned.
[I 2021-05-10 07:14:32,862] Trial 1533 pruned.
[I 2021-05-10 07:14:32,959] Trial 1534 pruned.
[I 2021-05-10 07:14:33,245] Trial 1535 pruned.
[I 2021-05-10 07:14:33,778] Trial 1536 pruned.
[I 2021-05-10 07:14:34,318] Trial 1537 pruned.
[I 2021-05-10 07:14:34,860] Trial 1538 pruned.
[I 2021-05-10 07:14:35,389] Trial 1539 pruned.
[I 2021-05-10 07:14:35,932] Trial 1540 pruned.
[I 2021-05-10 07:14:36,464] Trial 1541 pruned.
[I 2021-05-10 07:14:37,002] Trial 1542 pruned.
[I 2021-05-10 07:14:37,546] Trial 1543 pruned.
[I 2021-05-10 07:14:38,069] Trial 1544 pruned.
[I 2021-05-10 07:14:38,610] Trial 1545 pruned.
[I 2021-05-10 07:14:39,148] Trial 1546 pruned.
[I 2021-05-10 07:14:39,688] Trial 1547 pruned.
[I 2021-05-10 07:14:40,227] Trial 1548 pruned.
[I 2021-05-10 07:14:40,324] Trial 1549 pruned.
[I 2021-05-10 07:14:41,869] Trial 1550 pruned.
[I 2021-05-10 07:14:42,028] Trial 1551 pruned.
[I 2021-05-10 07:14:42,547] Trial 1552 pruned.
[I 2021-05-10 07:14:43,085] Trial 1553 pruned.
[I 2021-05-10 07:14:43,622] Trial 1554 pruned.
[I 2021-05-10 07:14:44,159] Trial 1555 pruned.
[I 2021-05-10 07:14:45,694] Trial 1556 pruned.
[I 2021-05-10 07:14:46,238] Trial 1557 pruned.
[I 2021-05-10 07:14:46,773] Trial 1558 pruned.
[I 2021-05-10 07:14:47,308] Trial 1559 pruned.
[I 2021-05-10 07:15:37,763] Trial 1560 finished with value: 378.09637451171875 and parameters: {'lr': 0.0048550338203317495, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:15:38,299] Trial 1561 pruned.
[I 2021-05-10 07:15:38,831] Trial 1562 pruned.
[I 2021-05-10 07:15:39,364] Trial 1563 pruned.
[I 2021-05-10 07:15:39,648] Trial 1564 pruned.
[I 2021-05-10 07:15:39,744] Trial 1565 pruned.
[I 2021-05-10 07:15:40,266] Trial 1566 pruned.
[I 2021-05-10 07:15:40,795] Trial 1567 pruned.
[I 2021-05-10 07:15:41,326] Trial 1568 pruned.
[I 2021-05-10 07:15:41,845] Trial 1569 pruned.
[I 2021-05-10 07:15:42,378] Trial 1570 pruned.
[I 2021-05-10 07:16:32,942] Trial 1571 finished with value: 402.8674011230469 and parameters: {'lr': 0.005467964630075268, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:16:33,480] Trial 1572 pruned.
[I 2021-05-10 07:16:34,012] Trial 1573 pruned.
[I 2021-05-10 07:16:34,544] Trial 1574 pruned.
[I 2021-05-10 07:16:35,073] Trial 1575 pruned.
[I 2021-05-10 07:16:35,602] Trial 1576 pruned.
[I 2021-05-10 07:16:36,131] Trial 1577 pruned.
[I 2021-05-10 07:16:36,226] Trial 1578 pruned.
[I 2021-05-10 07:16:36,750] Trial 1579 pruned.
[I 2021-05-10 07:16:37,279] Trial 1580 pruned.
[I 2021-05-10 07:16:37,436] Trial 1581 pruned.
[I 2021-05-10 07:16:37,967] Trial 1582 pruned.
[I 2021-05-10 07:16:38,485] Trial 1583 pruned.
[I 2021-05-10 07:16:39,005] Trial 1584 pruned.
[I 2021-05-10 07:16:39,535] Trial 1585 pruned.
[I 2021-05-10 07:16:40,072] Trial 1586 pruned.
[I 2021-05-10 07:16:40,602] Trial 1587 pruned.
[I 2021-05-10 07:16:41,138] Trial 1588 pruned.
[I 2021-05-10 07:16:41,668] Trial 1589 pruned.
[I 2021-05-10 07:16:42,204] Trial 1590 pruned.
[I 2021-05-10 07:16:43,744] Trial 1591 pruned.
[I 2021-05-10 07:16:44,273] Trial 1592 pruned.
[I 2021-05-10 07:16:44,551] Trial 1593 pruned.
[I 2021-05-10 07:16:45,081] Trial 1594 pruned.
[I 2021-05-10 07:16:45,177] Trial 1595 pruned.
[I 2021-05-10 07:16:45,699] Trial 1596 pruned.
[I 2021-05-10 07:16:46,234] Trial 1597 pruned.
[I 2021-05-10 07:16:46,773] Trial 1598 pruned.
[I 2021-05-10 07:16:47,302] Trial 1599 pruned.
[I 2021-05-10 07:16:47,835] Trial 1600 pruned.
[I 2021-05-10 07:16:48,373] Trial 1601 pruned.
[I 2021-05-10 07:16:48,911] Trial 1602 pruned.
[I 2021-05-10 07:16:49,451] Trial 1603 pruned.
[I 2021-05-10 07:16:50,997] Trial 1604 pruned.
[I 2021-05-10 07:16:51,527] Trial 1605 pruned.
[I 2021-05-10 07:16:52,044] Trial 1606 pruned.
[I 2021-05-10 07:16:52,579] Trial 1607 pruned.
[I 2021-05-10 07:16:52,677] Trial 1608 pruned.
[I 2021-05-10 07:16:53,211] Trial 1609 pruned.
[I 2021-05-10 07:16:53,373] Trial 1610 pruned.
[I 2021-05-10 07:16:53,900] Trial 1611 pruned.
[I 2021-05-10 07:16:54,442] Trial 1612 pruned.
[I 2021-05-10 07:16:54,986] Trial 1613 pruned.
[I 2021-05-10 07:16:55,525] Trial 1614 pruned.
[I 2021-05-10 07:16:56,062] Trial 1615 pruned.
[I 2021-05-10 07:16:56,600] Trial 1616 pruned.
[I 2021-05-10 07:16:57,128] Trial 1617 pruned.
[I 2021-05-10 07:16:57,669] Trial 1618 pruned.
[I 2021-05-10 07:16:58,207] Trial 1619 pruned.
[I 2021-05-10 07:16:58,753] Trial 1620 pruned.
[I 2021-05-10 07:16:59,300] Trial 1621 pruned.
[I 2021-05-10 07:16:59,839] Trial 1622 pruned.
[I 2021-05-10 07:17:00,124] Trial 1623 pruned.
[I 2021-05-10 07:17:00,221] Trial 1624 pruned.
[I 2021-05-10 07:17:00,760] Trial 1625 pruned.
[I 2021-05-10 07:17:01,304] Trial 1626 pruned.
[I 2021-05-10 07:17:01,840] Trial 1627 pruned.
[I 2021-05-10 07:17:02,381] Trial 1628 pruned.
[I 2021-05-10 07:17:02,914] Trial 1629 pruned.
[I 2021-05-10 07:17:03,455] Trial 1630 pruned.
[I 2021-05-10 07:17:03,999] Trial 1631 pruned.
[I 2021-05-10 07:17:04,535] Trial 1632 pruned.
[I 2021-05-10 07:17:05,072] Trial 1633 pruned.
[I 2021-05-10 07:17:05,599] Trial 1634 pruned.
[I 2021-05-10 07:17:06,262] Trial 1635 pruned.
[I 2021-05-10 07:17:06,819] Trial 1636 pruned.
[I 2021-05-10 07:17:06,914] Trial 1637 pruned.
[I 2021-05-10 07:17:07,461] Trial 1638 pruned.
[I 2021-05-10 07:17:07,986] Trial 1639 pruned.
[I 2021-05-10 07:17:08,526] Trial 1640 pruned.
[I 2021-05-10 07:17:09,070] Trial 1641 pruned.
[I 2021-05-10 07:17:09,228] Trial 1642 pruned.
[I 2021-05-10 07:17:09,766] Trial 1643 pruned.
[I 2021-05-10 07:17:10,295] Trial 1644 pruned.
[I 2021-05-10 07:17:10,836] Trial 1645 pruned.
[I 2021-05-10 07:17:11,366] Trial 1646 pruned.
[I 2021-05-10 07:17:11,897] Trial 1647 pruned.
[I 2021-05-10 07:17:12,435] Trial 1648 pruned.
[I 2021-05-10 07:17:12,968] Trial 1649 pruned.
[I 2021-05-10 07:17:13,498] Trial 1650 pruned.
[I 2021-05-10 07:17:13,782] Trial 1651 pruned.
[I 2021-05-10 07:17:13,880] Trial 1652 pruned.
[I 2021-05-10 07:17:14,420] Trial 1653 pruned.
[I 2021-05-10 07:17:14,950] Trial 1654 pruned.
[I 2021-05-10 07:17:15,496] Trial 1655 pruned.
[I 2021-05-10 07:17:16,030] Trial 1656 pruned.
[I 2021-05-10 07:17:16,569] Trial 1657 pruned.
[I 2021-05-10 07:17:18,132] Trial 1658 pruned.
[I 2021-05-10 07:17:18,663] Trial 1659 pruned.
[I 2021-05-10 07:17:19,203] Trial 1660 pruned.
[I 2021-05-10 07:17:19,727] Trial 1661 pruned.
[I 2021-05-10 07:17:20,268] Trial 1662 pruned.
[I 2021-05-10 07:17:20,811] Trial 1663 pruned.
[I 2021-05-10 07:17:21,333] Trial 1664 pruned.
[I 2021-05-10 07:17:21,873] Trial 1665 pruned.
[I 2021-05-10 07:17:21,970] Trial 1666 pruned.
[I 2021-05-10 07:17:22,513] Trial 1667 pruned.
[I 2021-05-10 07:17:23,045] Trial 1668 pruned.
[I 2021-05-10 07:17:23,206] Trial 1669 pruned.
[I 2021-05-10 07:18:14,457] Trial 1670 finished with value: 364.2746276855469 and parameters: {'lr': 0.005454666607413614, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:18:14,993] Trial 1671 pruned.
[I 2021-05-10 07:18:15,533] Trial 1672 pruned.
[I 2021-05-10 07:18:16,075] Trial 1673 pruned.
[I 2021-05-10 07:18:16,616] Trial 1674 pruned.
[I 2021-05-10 07:18:17,144] Trial 1675 pruned.
[I 2021-05-10 07:18:17,687] Trial 1676 pruned.
[I 2021-05-10 07:18:18,228] Trial 1677 pruned.
[I 2021-05-10 07:18:18,757] Trial 1678 pruned.
[I 2021-05-10 07:18:19,301] Trial 1679 pruned.
[I 2021-05-10 07:18:19,843] Trial 1680 pruned.
[I 2021-05-10 07:18:20,132] Trial 1681 pruned.
[I 2021-05-10 07:18:20,231] Trial 1682 pruned.
[I 2021-05-10 07:18:20,776] Trial 1683 pruned.
[I 2021-05-10 07:18:21,314] Trial 1684 pruned.
[I 2021-05-10 07:18:21,854] Trial 1685 pruned.
[I 2021-05-10 07:18:22,400] Trial 1686 pruned.
[I 2021-05-10 07:18:22,942] Trial 1687 pruned.
[I 2021-05-10 07:18:23,473] Trial 1688 pruned.
[I 2021-05-10 07:18:24,010] Trial 1689 pruned.
[I 2021-05-10 07:18:24,550] Trial 1690 pruned.
[I 2021-05-10 07:18:25,091] Trial 1691 pruned.
[I 2021-05-10 07:18:25,621] Trial 1692 pruned.
[I 2021-05-10 07:18:26,167] Trial 1693 pruned.
[I 2021-05-10 07:18:26,723] Trial 1694 pruned.
[I 2021-05-10 07:18:27,268] Trial 1695 pruned.
[I 2021-05-10 07:18:27,365] Trial 1696 pruned.
[I 2021-05-10 07:18:27,892] Trial 1697 pruned.
[I 2021-05-10 07:18:28,054] Trial 1698 pruned.
[I 2021-05-10 07:18:28,597] Trial 1699 pruned.
[I 2021-05-10 07:18:29,143] Trial 1700 pruned.
[I 2021-05-10 07:18:29,683] Trial 1701 pruned.
[I 2021-05-10 07:18:30,215] Trial 1702 pruned.
[I 2021-05-10 07:18:30,757] Trial 1703 pruned.
[I 2021-05-10 07:18:31,304] Trial 1704 pruned.
[I 2021-05-10 07:18:31,837] Trial 1705 pruned.
[I 2021-05-10 07:18:32,381] Trial 1706 pruned.
[I 2021-05-10 07:18:33,948] Trial 1707 pruned.
[I 2021-05-10 07:18:34,489] Trial 1708 pruned.
[I 2021-05-10 07:18:35,032] Trial 1709 pruned.
[I 2021-05-10 07:18:35,132] Trial 1710 pruned.
[I 2021-05-10 07:18:35,423] Trial 1711 pruned.
[I 2021-05-10 07:18:35,962] Trial 1712 pruned.
[I 2021-05-10 07:18:36,508] Trial 1713 pruned.
[I 2021-05-10 07:18:37,051] Trial 1714 pruned.
[I 2021-05-10 07:18:37,590] Trial 1715 pruned.
[I 2021-05-10 07:18:38,134] Trial 1716 pruned.
[I 2021-05-10 07:18:38,672] Trial 1717 pruned.
[I 2021-05-10 07:18:39,218] Trial 1718 pruned.
[I 2021-05-10 07:18:39,752] Trial 1719 pruned.
[I 2021-05-10 07:18:40,296] Trial 1720 pruned.
[I 2021-05-10 07:18:40,837] Trial 1721 pruned.
[I 2021-05-10 07:18:41,381] Trial 1722 pruned.
[I 2021-05-10 07:18:41,911] Trial 1723 pruned.
[I 2021-05-10 07:18:42,453] Trial 1724 pruned.
[I 2021-05-10 07:18:42,552] Trial 1725 pruned.
[I 2021-05-10 07:18:43,093] Trial 1726 pruned.
[I 2021-05-10 07:18:43,634] Trial 1727 pruned.
[I 2021-05-10 07:18:44,175] Trial 1728 pruned.
[I 2021-05-10 07:18:44,339] Trial 1729 pruned.
[I 2021-05-10 07:18:44,881] Trial 1730 pruned.
[I 2021-05-10 07:18:45,422] Trial 1731 pruned.
[I 2021-05-10 07:18:45,955] Trial 1732 pruned.
[I 2021-05-10 07:18:46,501] Trial 1733 pruned.
[I 2021-05-10 07:18:47,049] Trial 1734 pruned.
[I 2021-05-10 07:18:47,595] Trial 1735 pruned.
[I 2021-05-10 07:18:48,138] Trial 1736 pruned.
[I 2021-05-10 07:18:48,685] Trial 1737 pruned.
[I 2021-05-10 07:18:49,227] Trial 1738 pruned.
[I 2021-05-10 07:18:49,327] Trial 1739 pruned.
[I 2021-05-10 07:18:49,618] Trial 1740 pruned.
[I 2021-05-10 07:18:50,163] Trial 1741 pruned.
[I 2021-05-10 07:18:50,707] Trial 1742 pruned.
[I 2021-05-10 07:18:51,242] Trial 1743 pruned.
[I 2021-05-10 07:18:51,782] Trial 1744 pruned.
[I 2021-05-10 07:18:52,328] Trial 1745 pruned.
[I 2021-05-10 07:18:52,868] Trial 1746 pruned.
[I 2021-05-10 07:18:53,413] Trial 1747 pruned.
[I 2021-05-10 07:18:53,960] Trial 1748 pruned.
[I 2021-05-10 07:18:54,510] Trial 1749 pruned.
[I 2021-05-10 07:18:55,053] Trial 1750 pruned.
[I 2021-05-10 07:18:55,599] Trial 1751 pruned.
[I 2021-05-10 07:18:56,146] Trial 1752 pruned.
[I 2021-05-10 07:18:56,685] Trial 1753 pruned.
[I 2021-05-10 07:18:56,786] Trial 1754 pruned.
[I 2021-05-10 07:19:48,189] Trial 1755 finished with value: 375.2056579589844 and parameters: {'lr': 0.003716190873740577, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:19:48,731] Trial 1756 pruned.
[I 2021-05-10 07:19:49,258] Trial 1757 pruned.
[I 2021-05-10 07:19:49,801] Trial 1758 pruned.
[I 2021-05-10 07:19:50,345] Trial 1759 pruned.
[I 2021-05-10 07:19:50,877] Trial 1760 pruned.
[I 2021-05-10 07:19:51,419] Trial 1761 pruned.
[I 2021-05-10 07:19:51,955] Trial 1762 pruned.
[I 2021-05-10 07:19:52,504] Trial 1763 pruned.
[I 2021-05-10 07:19:53,047] Trial 1764 pruned.
[I 2021-05-10 07:19:53,582] Trial 1765 pruned.
[I 2021-05-10 07:19:54,123] Trial 1766 pruned.
[I 2021-05-10 07:19:54,664] Trial 1767 pruned.
[I 2021-05-10 07:19:55,194] Trial 1768 pruned.
[I 2021-05-10 07:19:55,736] Trial 1769 pruned.
[I 2021-05-10 07:19:56,272] Trial 1770 pruned.
[I 2021-05-10 07:19:56,817] Trial 1771 pruned.
[I 2021-05-10 07:19:57,365] Trial 1772 pruned.
[I 2021-05-10 07:19:57,896] Trial 1773 pruned.
[I 2021-05-10 07:20:42,982] Trial 1774 pruned.
[I 2021-05-10 07:20:43,531] Trial 1775 pruned.
[I 2021-05-10 07:20:44,061] Trial 1776 pruned.
[I 2021-05-10 07:20:44,591] Trial 1777 pruned.
[I 2021-05-10 07:20:45,123] Trial 1778 pruned.
[I 2021-05-10 07:20:45,669] Trial 1779 pruned.
[I 2021-05-10 07:20:46,217] Trial 1780 pruned.
[I 2021-05-10 07:20:46,758] Trial 1781 pruned.
[I 2021-05-10 07:20:47,313] Trial 1782 pruned.
[I 2021-05-10 07:20:47,860] Trial 1783 pruned.
[I 2021-05-10 07:20:48,394] Trial 1784 pruned.
[I 2021-05-10 07:20:48,935] Trial 1785 pruned.
[I 2021-05-10 07:20:49,478] Trial 1786 pruned.
[I 2021-05-10 07:20:50,013] Trial 1787 pruned.
[I 2021-05-10 07:20:50,559] Trial 1788 pruned.
[I 2021-05-10 07:20:50,720] Trial 1789 pruned.
[I 2021-05-10 07:20:51,264] Trial 1790 pruned.
[I 2021-05-10 07:20:51,806] Trial 1791 pruned.
[I 2021-05-10 07:20:52,345] Trial 1792 pruned.
[I 2021-05-10 07:20:52,912] Trial 1793 pruned.
[I 2021-05-10 07:20:53,469] Trial 1794 pruned.
[I 2021-05-10 07:20:54,009] Trial 1795 pruned.
[I 2021-05-10 07:20:54,551] Trial 1796 pruned.
[I 2021-05-10 07:20:55,087] Trial 1797 pruned.
[I 2021-05-10 07:20:55,634] Trial 1798 pruned.
[I 2021-05-10 07:20:56,181] Trial 1799 pruned.
[I 2021-05-10 07:20:56,470] Trial 1800 pruned.
[I 2021-05-10 07:20:57,015] Trial 1801 pruned.
[I 2021-05-10 07:20:57,562] Trial 1802 pruned.
[I 2021-05-10 07:20:58,093] Trial 1803 pruned.
[I 2021-05-10 07:20:58,622] Trial 1804 pruned.
[I 2021-05-10 07:20:59,155] Trial 1805 pruned.
[I 2021-05-10 07:20:59,701] Trial 1806 pruned.
[I 2021-05-10 07:21:00,261] Trial 1807 pruned.
[I 2021-05-10 07:21:00,800] Trial 1808 pruned.
[I 2021-05-10 07:21:01,346] Trial 1809 pruned.
[I 2021-05-10 07:21:01,887] Trial 1810 pruned.
[I 2021-05-10 07:21:02,425] Trial 1811 pruned.
[I 2021-05-10 07:21:02,968] Trial 1812 pruned.
[I 2021-05-10 07:21:03,517] Trial 1813 pruned.
[I 2021-05-10 07:21:54,013] Trial 1814 finished with value: 374.1999816894531 and parameters: {'lr': 0.004217006035579503, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:21:54,547] Trial 1815 pruned.
[I 2021-05-10 07:21:55,089] Trial 1816 pruned.
[I 2021-05-10 07:21:55,630] Trial 1817 pruned.
[I 2021-05-10 07:21:56,165] Trial 1818 pruned.
[I 2021-05-10 07:21:56,715] Trial 1819 pruned.
[I 2021-05-10 07:21:56,876] Trial 1820 pruned.
[I 2021-05-10 07:21:57,424] Trial 1821 pruned.
[I 2021-05-10 07:22:48,824] Trial 1822 finished with value: 375.1763000488281 and parameters: {'lr': 0.004268228382126167, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:22:49,368] Trial 1823 pruned.
[I 2021-05-10 07:22:49,903] Trial 1824 pruned.
[I 2021-05-10 07:22:50,453] Trial 1825 pruned.
[I 2021-05-10 07:22:50,996] Trial 1826 pruned.
[I 2021-05-10 07:22:51,529] Trial 1827 pruned.
[I 2021-05-10 07:22:52,074] Trial 1828 pruned.
[I 2021-05-10 07:22:52,616] Trial 1829 pruned.
[I 2021-05-10 07:22:52,909] Trial 1830 pruned.
[I 2021-05-10 07:22:53,460] Trial 1831 pruned.
[I 2021-05-10 07:22:54,001] Trial 1832 pruned.
[I 2021-05-10 07:22:54,543] Trial 1833 pruned.
[I 2021-05-10 07:22:55,092] Trial 1834 pruned.
[I 2021-05-10 07:22:55,623] Trial 1835 pruned.
[I 2021-05-10 07:22:56,172] Trial 1836 pruned.
[I 2021-05-10 07:22:56,707] Trial 1837 pruned.
[I 2021-05-10 07:22:57,285] Trial 1838 pruned.
[I 2021-05-10 07:22:57,833] Trial 1839 pruned.
[I 2021-05-10 07:22:58,374] Trial 1840 pruned.
[I 2021-05-10 07:22:58,940] Trial 1841 pruned.
[I 2021-05-10 07:22:59,487] Trial 1842 pruned.
[I 2021-05-10 07:23:00,028] Trial 1843 pruned.
[I 2021-05-10 07:23:00,579] Trial 1844 pruned.
[I 2021-05-10 07:23:01,124] Trial 1845 pruned.
[I 2021-05-10 07:23:01,286] Trial 1846 pruned.
[I 2021-05-10 07:23:01,832] Trial 1847 pruned.
[I 2021-05-10 07:23:02,373] Trial 1848 pruned.
[I 2021-05-10 07:23:02,921] Trial 1849 pruned.
[I 2021-05-10 07:23:03,473] Trial 1850 pruned.
[I 2021-05-10 07:23:04,016] Trial 1851 pruned.
[I 2021-05-10 07:23:04,564] Trial 1852 pruned.
[I 2021-05-10 07:23:05,113] Trial 1853 pruned.
[I 2021-05-10 07:23:05,665] Trial 1854 pruned.
[I 2021-05-10 07:23:06,232] Trial 1855 pruned.
[I 2021-05-10 07:23:06,779] Trial 1856 pruned.
[I 2021-05-10 07:23:07,328] Trial 1857 pruned.
[I 2021-05-10 07:23:07,622] Trial 1858 pruned.
[I 2021-05-10 07:23:08,166] Trial 1859 pruned.
[I 2021-05-10 07:23:08,713] Trial 1860 pruned.
[I 2021-05-10 07:23:09,254] Trial 1861 pruned.
[I 2021-05-10 07:23:09,786] Trial 1862 pruned.
[I 2021-05-10 07:23:10,331] Trial 1863 pruned.
[I 2021-05-10 07:23:10,866] Trial 1864 pruned.
[I 2021-05-10 07:23:11,409] Trial 1865 pruned.
[I 2021-05-10 07:23:11,956] Trial 1866 pruned.
[I 2021-05-10 07:23:12,490] Trial 1867 pruned.
[I 2021-05-10 07:23:13,035] Trial 1868 pruned.
[I 2021-05-10 07:23:13,578] Trial 1869 pruned.
[I 2021-05-10 07:23:14,114] Trial 1870 pruned.
[I 2021-05-10 07:23:14,656] Trial 1871 pruned.
[I 2021-05-10 07:23:15,206] Trial 1872 pruned.
[I 2021-05-10 07:23:15,750] Trial 1873 pruned.
[I 2021-05-10 07:23:15,916] Trial 1874 pruned.
[I 2021-05-10 07:24:06,476] Trial 1875 finished with value: 397.85491943359375 and parameters: {'lr': 0.003489627511299479, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:24:07,021] Trial 1876 pruned.
[I 2021-05-10 07:24:07,564] Trial 1877 pruned.
[I 2021-05-10 07:24:08,096] Trial 1878 pruned.
[I 2021-05-10 07:24:08,637] Trial 1879 pruned.
[I 2021-05-10 07:24:09,183] Trial 1880 pruned.
[I 2021-05-10 07:24:09,723] Trial 1881 pruned.
[I 2021-05-10 07:24:10,270] Trial 1882 pruned.
[I 2021-05-10 07:24:10,811] Trial 1883 pruned.
[I 2021-05-10 07:24:11,357] Trial 1884 pruned.
[I 2021-05-10 07:24:11,906] Trial 1885 pruned.
[I 2021-05-10 07:24:12,443] Trial 1886 pruned.
[I 2021-05-10 07:24:12,985] Trial 1887 pruned.
[I 2021-05-10 07:24:13,276] Trial 1888 pruned.
[I 2021-05-10 07:24:13,814] Trial 1889 pruned.
[I 2021-05-10 07:24:14,361] Trial 1890 pruned.
[I 2021-05-10 07:24:14,896] Trial 1891 pruned.
[I 2021-05-10 07:24:15,441] Trial 1892 pruned.
[I 2021-05-10 07:24:15,989] Trial 1893 pruned.
[I 2021-05-10 07:24:16,524] Trial 1894 pruned.
[I 2021-05-10 07:24:17,071] Trial 1895 pruned.
[I 2021-05-10 07:24:17,614] Trial 1896 pruned.
[I 2021-05-10 07:24:18,167] Trial 1897 pruned.
[I 2021-05-10 07:24:18,725] Trial 1898 pruned.
[I 2021-05-10 07:24:19,278] Trial 1899 pruned.
[I 2021-05-10 07:24:19,812] Trial 1900 pruned.
[I 2021-05-10 07:24:20,362] Trial 1901 pruned.
[I 2021-05-10 07:24:20,896] Trial 1902 pruned.
[I 2021-05-10 07:24:21,061] Trial 1903 pruned.
[I 2021-05-10 07:24:21,610] Trial 1904 pruned.
[I 2021-05-10 07:24:22,152] Trial 1905 pruned.
[I 2021-05-10 07:25:13,551] Trial 1906 finished with value: 370.29925537109375 and parameters: {'lr': 0.0030591669845220137, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:25:14,095] Trial 1907 pruned.
[I 2021-05-10 07:26:05,307] Trial 1908 finished with value: 376.9954528808594 and parameters: {'lr': 0.0022547135093814634, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:26:05,848] Trial 1909 pruned.
[I 2021-05-10 07:26:06,393] Trial 1910 pruned.
[I 2021-05-10 07:26:06,940] Trial 1911 pruned.
[I 2021-05-10 07:26:07,483] Trial 1912 pruned.
[I 2021-05-10 07:26:08,031] Trial 1913 pruned.
[I 2021-05-10 07:26:08,580] Trial 1914 pruned.
[I 2021-05-10 07:26:09,111] Trial 1915 pruned.
[I 2021-05-10 07:26:09,652] Trial 1916 pruned.
[I 2021-05-10 07:26:10,199] Trial 1917 pruned.
[I 2021-05-10 07:26:10,492] Trial 1918 pruned.
[I 2021-05-10 07:26:12,062] Trial 1919 pruned.
[I 2021-05-10 07:26:12,606] Trial 1920 pruned.
[I 2021-05-10 07:26:13,152] Trial 1921 pruned.
[I 2021-05-10 07:26:13,714] Trial 1922 pruned.
[I 2021-05-10 07:27:05,025] Trial 1923 finished with value: 376.0091552734375 and parameters: {'lr': 0.002971992276515012, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:27:05,578] Trial 1924 pruned.
[I 2021-05-10 07:27:06,123] Trial 1925 pruned.
[I 2021-05-10 07:27:06,668] Trial 1926 pruned.
[I 2021-05-10 07:27:07,212] Trial 1927 pruned.
[I 2021-05-10 07:27:07,760] Trial 1928 pruned.
[I 2021-05-10 07:27:08,310] Trial 1929 pruned.
[I 2021-05-10 07:27:08,856] Trial 1930 pruned.
[I 2021-05-10 07:27:09,418] Trial 1931 pruned.
[I 2021-05-10 07:27:09,603] Trial 1932 pruned.
[I 2021-05-10 07:27:10,159] Trial 1933 pruned.
[I 2021-05-10 07:27:10,708] Trial 1934 pruned.
[I 2021-05-10 07:27:11,261] Trial 1935 pruned.
[I 2021-05-10 07:27:11,811] Trial 1936 pruned.
[I 2021-05-10 07:27:12,355] Trial 1937 pruned.
[I 2021-05-10 07:27:12,902] Trial 1938 pruned.
[I 2021-05-10 07:27:13,447] Trial 1939 pruned.
[I 2021-05-10 07:27:13,997] Trial 1940 pruned.
[I 2021-05-10 07:27:14,542] Trial 1941 pruned.
[I 2021-05-10 07:27:15,080] Trial 1942 pruned.
[I 2021-05-10 07:27:15,621] Trial 1943 pruned.
[I 2021-05-10 07:27:16,169] Trial 1944 pruned.
[I 2021-05-10 07:27:16,717] Trial 1945 pruned.
[I 2021-05-10 07:27:17,009] Trial 1946 pruned.
[I 2021-05-10 07:27:17,553] Trial 1947 pruned.
[I 2021-05-10 07:27:18,098] Trial 1948 pruned.
[I 2021-05-10 07:27:18,642] Trial 1949 pruned.
[I 2021-05-10 07:27:19,190] Trial 1950 pruned.
[I 2021-05-10 07:27:19,739] Trial 1951 pruned.
[I 2021-05-10 07:27:20,286] Trial 1952 pruned.
[I 2021-05-10 07:27:20,833] Trial 1953 pruned.
[I 2021-05-10 07:27:21,377] Trial 1954 pruned.
[I 2021-05-10 07:27:21,927] Trial 1955 pruned.
[I 2021-05-10 07:27:22,478] Trial 1956 pruned.
[I 2021-05-10 07:27:23,022] Trial 1957 pruned.
[I 2021-05-10 07:27:23,566] Trial 1958 pruned.
[I 2021-05-10 07:27:24,113] Trial 1959 pruned.
[I 2021-05-10 07:27:24,276] Trial 1960 pruned.
[I 2021-05-10 07:27:24,824] Trial 1961 pruned.
[I 2021-05-10 07:27:25,371] Trial 1962 pruned.
[I 2021-05-10 07:27:25,919] Trial 1963 pruned.
[I 2021-05-10 07:27:26,462] Trial 1964 pruned.
[I 2021-05-10 07:27:27,008] Trial 1965 pruned.
[I 2021-05-10 07:27:27,560] Trial 1966 pruned.
[I 2021-05-10 07:27:28,106] Trial 1967 pruned.
[I 2021-05-10 07:27:28,650] Trial 1968 pruned.
[I 2021-05-10 07:27:29,195] Trial 1969 pruned.
[I 2021-05-10 07:27:29,733] Trial 1970 pruned.
[I 2021-05-10 07:27:30,280] Trial 1971 pruned.
[I 2021-05-10 07:27:30,572] Trial 1972 pruned.
[I 2021-05-10 07:27:31,121] Trial 1973 pruned.
[I 2021-05-10 07:27:31,665] Trial 1974 pruned.
[I 2021-05-10 07:27:32,215] Trial 1975 pruned.
[I 2021-05-10 07:27:32,762] Trial 1976 pruned.
[I 2021-05-10 07:27:33,309] Trial 1977 pruned.
[I 2021-05-10 07:27:33,855] Trial 1978 pruned.
[I 2021-05-10 07:27:34,401] Trial 1979 pruned.
[I 2021-05-10 07:27:34,953] Trial 1980 pruned.
[I 2021-05-10 07:27:35,498] Trial 1981 pruned.
[I 2021-05-10 07:27:36,043] Trial 1982 pruned.
[I 2021-05-10 07:27:36,586] Trial 1983 pruned.
[I 2021-05-10 07:27:37,137] Trial 1984 pruned.
[I 2021-05-10 07:27:37,687] Trial 1985 pruned.
[I 2021-05-10 07:27:38,234] Trial 1986 pruned.
[I 2021-05-10 07:27:38,779] Trial 1987 pruned.
[I 2021-05-10 07:27:38,947] Trial 1988 pruned.
[I 2021-05-10 07:27:39,498] Trial 1989 pruned.
[I 2021-05-10 07:27:40,043] Trial 1990 pruned.
[I 2021-05-10 07:27:40,589] Trial 1991 pruned.
[I 2021-05-10 07:27:41,134] Trial 1992 pruned.
[I 2021-05-10 07:27:41,682] Trial 1993 pruned.
[I 2021-05-10 07:27:42,228] Trial 1994 pruned.
[I 2021-05-10 07:27:42,776] Trial 1995 pruned.
[I 2021-05-10 07:27:43,326] Trial 1996 pruned.
[I 2021-05-10 07:27:43,869] Trial 1997 pruned.
[I 2021-05-10 07:27:44,404] Trial 1998 pruned.
[I 2021-05-10 07:27:44,951] Trial 1999 pruned.
[I 2021-05-10 07:27:45,502] Trial 2000 pruned.
[I 2021-05-10 07:27:45,797] Trial 2001 pruned.
[I 2021-05-10 07:27:46,346] Trial 2002 pruned.
[I 2021-05-10 07:27:46,896] Trial 2003 pruned.
[I 2021-05-10 07:27:47,446] Trial 2004 pruned.
[I 2021-05-10 07:27:47,991] Trial 2005 pruned.
[I 2021-05-10 07:27:48,537] Trial 2006 pruned.
[I 2021-05-10 07:27:49,085] Trial 2007 pruned.
[I 2021-05-10 07:28:40,354] Trial 2008 finished with value: 369.5242004394531 and parameters: {'lr': 0.0035005881361194285, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:28:40,900] Trial 2009 pruned.
[I 2021-05-10 07:28:41,450] Trial 2010 pruned.
[I 2021-05-10 07:28:41,997] Trial 2011 pruned.
[I 2021-05-10 07:28:42,544] Trial 2012 pruned.
[I 2021-05-10 07:28:43,090] Trial 2013 pruned.
[I 2021-05-10 07:28:43,644] Trial 2014 pruned.
[I 2021-05-10 07:28:44,195] Trial 2015 pruned.
[I 2021-05-10 07:28:44,739] Trial 2016 pruned.
[I 2021-05-10 07:28:45,286] Trial 2017 pruned.
[I 2021-05-10 07:28:45,831] Trial 2018 pruned.
[I 2021-05-10 07:28:46,000] Trial 2019 pruned.
[I 2021-05-10 07:28:46,554] Trial 2020 pruned.
[I 2021-05-10 07:29:27,786] Trial 2021 pruned.
[I 2021-05-10 07:29:28,339] Trial 2022 pruned.
[I 2021-05-10 07:29:28,885] Trial 2023 pruned.
[I 2021-05-10 07:29:29,437] Trial 2024 pruned.
[I 2021-05-10 07:29:29,976] Trial 2025 pruned.
[I 2021-05-10 07:29:30,523] Trial 2026 pruned.
[I 2021-05-10 07:29:31,068] Trial 2027 pruned.
[I 2021-05-10 07:29:31,618] Trial 2028 pruned.
[I 2021-05-10 07:29:31,914] Trial 2029 pruned.
[I 2021-05-10 07:29:32,465] Trial 2030 pruned.
[I 2021-05-10 07:29:33,014] Trial 2031 pruned.
[I 2021-05-10 07:29:33,567] Trial 2032 pruned.
[I 2021-05-10 07:29:34,113] Trial 2033 pruned.
[I 2021-05-10 07:29:34,658] Trial 2034 pruned.
[I 2021-05-10 07:29:35,204] Trial 2035 pruned.
[I 2021-05-10 07:29:35,753] Trial 2036 pruned.
[I 2021-05-10 07:29:36,305] Trial 2037 pruned.
[I 2021-05-10 07:29:36,860] Trial 2038 pruned.
[I 2021-05-10 07:29:37,408] Trial 2039 pruned.
[I 2021-05-10 07:29:37,961] Trial 2040 pruned.
[I 2021-05-10 07:29:38,509] Trial 2041 pruned.
[I 2021-05-10 07:29:39,055] Trial 2042 pruned.
[I 2021-05-10 07:29:39,601] Trial 2043 pruned.
[I 2021-05-10 07:29:40,151] Trial 2044 pruned.
[I 2021-05-10 07:29:40,701] Trial 2045 pruned.
[I 2021-05-10 07:29:41,254] Trial 2046 pruned.
[I 2021-05-10 07:29:41,424] Trial 2047 pruned.
[I 2021-05-10 07:29:41,972] Trial 2048 pruned.
[I 2021-05-10 07:29:42,519] Trial 2049 pruned.
[I 2021-05-10 07:29:43,070] Trial 2050 pruned.
[I 2021-05-10 07:29:43,624] Trial 2051 pruned.
[I 2021-05-10 07:29:44,173] Trial 2052 pruned.
[I 2021-05-10 07:29:44,708] Trial 2053 pruned.
[I 2021-05-10 07:29:45,260] Trial 2054 pruned.
[I 2021-05-10 07:29:45,809] Trial 2055 pruned.
[I 2021-05-10 07:29:46,361] Trial 2056 pruned.
[I 2021-05-10 07:30:37,683] Trial 2057 finished with value: 364.2471618652344 and parameters: {'lr': 0.003217608933766813, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:30:38,235] Trial 2058 pruned.
[I 2021-05-10 07:30:38,785] Trial 2059 pruned.
[I 2021-05-10 07:30:39,334] Trial 2060 pruned.
[I 2021-05-10 07:30:39,879] Trial 2061 pruned.
[I 2021-05-10 07:30:40,175] Trial 2062 pruned.
[I 2021-05-10 07:30:40,721] Trial 2063 pruned.
[I 2021-05-10 07:30:41,269] Trial 2064 pruned.
[I 2021-05-10 07:30:41,818] Trial 2065 pruned.
[I 2021-05-10 07:30:42,371] Trial 2066 pruned.
[I 2021-05-10 07:30:42,918] Trial 2067 pruned.
[I 2021-05-10 07:30:43,465] Trial 2068 pruned.
[I 2021-05-10 07:30:44,016] Trial 2069 pruned.
[I 2021-05-10 07:30:44,562] Trial 2070 pruned.
[I 2021-05-10 07:30:45,110] Trial 2071 pruned.
[I 2021-05-10 07:30:45,663] Trial 2072 pruned.
[I 2021-05-10 07:30:46,217] Trial 2073 pruned.
[I 2021-05-10 07:30:46,771] Trial 2074 pruned.
[I 2021-05-10 07:30:46,939] Trial 2075 pruned.
[I 2021-05-10 07:30:47,490] Trial 2076 pruned.
[I 2021-05-10 07:31:38,783] Trial 2077 finished with value: 377.02410888671875 and parameters: {'lr': 0.005515837979486723, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:31:39,330] Trial 2078 pruned.
[I 2021-05-10 07:32:30,702] Trial 2079 finished with value: 390.64471435546875 and parameters: {'lr': 0.004903323293489917, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:32:31,239] Trial 2080 pruned.
[I 2021-05-10 07:32:31,789] Trial 2081 pruned.
[I 2021-05-10 07:32:32,340] Trial 2082 pruned.
[I 2021-05-10 07:32:32,890] Trial 2083 pruned.
[I 2021-05-10 07:32:33,440] Trial 2084 pruned.
[I 2021-05-10 07:32:33,987] Trial 2085 pruned.
[I 2021-05-10 07:32:34,282] Trial 2086 pruned.
[I 2021-05-10 07:32:34,831] Trial 2087 pruned.
[I 2021-05-10 07:32:35,380] Trial 2088 pruned.
[I 2021-05-10 07:32:35,929] Trial 2089 pruned.
[I 2021-05-10 07:32:36,478] Trial 2090 pruned.
[I 2021-05-10 07:32:37,030] Trial 2091 pruned.
[I 2021-05-10 07:32:37,576] Trial 2092 pruned.
[I 2021-05-10 07:32:38,123] Trial 2093 pruned.
[I 2021-05-10 07:32:38,674] Trial 2094 pruned.
[I 2021-05-10 07:32:39,222] Trial 2095 pruned.
[I 2021-05-10 07:32:39,773] Trial 2096 pruned.
[I 2021-05-10 07:32:40,324] Trial 2097 pruned.
[I 2021-05-10 07:32:40,877] Trial 2098 pruned.
[I 2021-05-10 07:32:41,422] Trial 2099 pruned.
[I 2021-05-10 07:33:32,775] Trial 2100 finished with value: 391.7146301269531 and parameters: {'lr': 0.004557725173128541, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:33:33,327] Trial 2101 pruned.
[I 2021-05-10 07:33:33,879] Trial 2102 pruned.
[I 2021-05-10 07:33:34,428] Trial 2103 pruned.
[I 2021-05-10 07:33:34,979] Trial 2104 pruned.
[I 2021-05-10 07:33:35,528] Trial 2105 pruned.
[I 2021-05-10 07:33:36,074] Trial 2106 pruned.
[I 2021-05-10 07:33:36,244] Trial 2107 pruned.
[I 2021-05-10 07:33:36,791] Trial 2108 pruned.
[I 2021-05-10 07:33:37,326] Trial 2109 pruned.
[I 2021-05-10 07:33:37,875] Trial 2110 pruned.
[I 2021-05-10 07:33:38,424] Trial 2111 pruned.
[I 2021-05-10 07:33:38,973] Trial 2112 pruned.
[I 2021-05-10 07:33:39,518] Trial 2113 pruned.
[I 2021-05-10 07:33:39,816] Trial 2114 pruned.
[I 2021-05-10 07:33:40,371] Trial 2115 pruned.
[I 2021-05-10 07:33:40,920] Trial 2116 pruned.
[I 2021-05-10 07:33:41,469] Trial 2117 pruned.
[I 2021-05-10 07:33:42,021] Trial 2118 pruned.
[I 2021-05-10 07:33:42,579] Trial 2119 pruned.
[I 2021-05-10 07:33:43,130] Trial 2120 pruned.
[I 2021-05-10 07:33:43,680] Trial 2121 pruned.
[I 2021-05-10 07:33:44,234] Trial 2122 pruned.
[I 2021-05-10 07:33:44,782] Trial 2123 pruned.
[I 2021-05-10 07:33:45,330] Trial 2124 pruned.
[I 2021-05-10 07:33:45,876] Trial 2125 pruned.
[I 2021-05-10 07:33:46,426] Trial 2126 pruned.
[I 2021-05-10 07:33:46,976] Trial 2127 pruned.
[I 2021-05-10 07:33:47,525] Trial 2128 pruned.
[I 2021-05-10 07:33:48,073] Trial 2129 pruned.
[I 2021-05-10 07:33:48,242] Trial 2130 pruned.
[I 2021-05-10 07:33:48,791] Trial 2131 pruned.
[I 2021-05-10 07:33:49,343] Trial 2132 pruned.
[I 2021-05-10 07:33:49,889] Trial 2133 pruned.
[I 2021-05-10 07:33:50,439] Trial 2134 pruned.
[I 2021-05-10 07:33:50,974] Trial 2135 pruned.
[I 2021-05-10 07:33:51,513] Trial 2136 pruned.
[I 2021-05-10 07:33:52,060] Trial 2137 pruned.
[I 2021-05-10 07:33:52,615] Trial 2138 pruned.
[I 2021-05-10 07:33:53,155] Trial 2139 pruned.
[I 2021-05-10 07:33:53,708] Trial 2140 pruned.
[I 2021-05-10 07:33:54,259] Trial 2141 pruned.
[I 2021-05-10 07:33:54,798] Trial 2142 pruned.
[I 2021-05-10 07:33:55,096] Trial 2143 pruned.
[I 2021-05-10 07:33:55,643] Trial 2144 pruned.
[I 2021-05-10 07:33:56,193] Trial 2145 pruned.
[I 2021-05-10 07:33:56,730] Trial 2146 pruned.
[I 2021-05-10 07:33:57,285] Trial 2147 pruned.
[I 2021-05-10 07:33:57,833] Trial 2148 pruned.
[I 2021-05-10 07:33:58,381] Trial 2149 pruned.
[I 2021-05-10 07:33:58,935] Trial 2150 pruned.
[I 2021-05-10 07:34:50,302] Trial 2151 finished with value: 388.742919921875 and parameters: {'lr': 0.006169827073871566, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:34:50,841] Trial 2152 pruned.
[I 2021-05-10 07:34:51,391] Trial 2153 pruned.
[I 2021-05-10 07:34:51,945] Trial 2154 pruned.
[I 2021-05-10 07:34:53,530] Trial 2155 pruned.
[I 2021-05-10 07:34:54,068] Trial 2156 pruned.
[I 2021-05-10 07:34:54,620] Trial 2157 pruned.
[I 2021-05-10 07:34:55,172] Trial 2158 pruned.
[I 2021-05-10 07:34:55,715] Trial 2159 pruned.
[I 2021-05-10 07:34:55,884] Trial 2160 pruned.
[I 2021-05-10 07:34:56,424] Trial 2161 pruned.
[I 2021-05-10 07:34:56,973] Trial 2162 pruned.
[I 2021-05-10 07:34:57,519] Trial 2163 pruned.
[I 2021-05-10 07:34:58,065] Trial 2164 pruned.
[I 2021-05-10 07:34:58,613] Trial 2165 pruned.
[I 2021-05-10 07:34:59,159] Trial 2166 pruned.
[I 2021-05-10 07:34:59,710] Trial 2167 pruned.
[I 2021-05-10 07:35:00,263] Trial 2168 pruned.
[I 2021-05-10 07:35:00,802] Trial 2169 pruned.
[I 2021-05-10 07:35:01,350] Trial 2170 pruned.
[I 2021-05-10 07:35:01,648] Trial 2171 pruned.
[I 2021-05-10 07:35:02,201] Trial 2172 pruned.
[I 2021-05-10 07:35:02,742] Trial 2173 pruned.
[I 2021-05-10 07:35:03,292] Trial 2174 pruned.
[I 2021-05-10 07:35:03,845] Trial 2175 pruned.
[I 2021-05-10 07:35:04,386] Trial 2176 pruned.
[I 2021-05-10 07:35:04,939] Trial 2177 pruned.
[I 2021-05-10 07:35:05,488] Trial 2178 pruned.
[I 2021-05-10 07:35:06,028] Trial 2179 pruned.
[I 2021-05-10 07:35:06,577] Trial 2180 pruned.
[I 2021-05-10 07:35:07,129] Trial 2181 pruned.
[I 2021-05-10 07:35:07,680] Trial 2182 pruned.
[I 2021-05-10 07:35:08,228] Trial 2183 pruned.
[I 2021-05-10 07:35:08,780] Trial 2184 pruned.
[I 2021-05-10 07:35:09,329] Trial 2185 pruned.
[I 2021-05-10 07:35:09,870] Trial 2186 pruned.
[I 2021-05-10 07:35:10,420] Trial 2187 pruned.
[I 2021-05-10 07:35:10,591] Trial 2188 pruned.
[I 2021-05-10 07:35:11,132] Trial 2189 pruned.
[I 2021-05-10 07:35:11,678] Trial 2190 pruned.
[I 2021-05-10 07:35:12,226] Trial 2191 pruned.
[I 2021-05-10 07:35:12,777] Trial 2192 pruned.
[I 2021-05-10 07:35:13,323] Trial 2193 pruned.
[I 2021-05-10 07:35:13,875] Trial 2194 pruned.
[I 2021-05-10 07:35:14,426] Trial 2195 pruned.
[I 2021-05-10 07:35:14,962] Trial 2196 pruned.
[I 2021-05-10 07:35:15,513] Trial 2197 pruned.
[I 2021-05-10 07:35:16,062] Trial 2198 pruned.
[I 2021-05-10 07:35:16,613] Trial 2199 pruned.
[I 2021-05-10 07:35:17,158] Trial 2200 pruned.
[I 2021-05-10 07:35:17,712] Trial 2201 pruned.
[I 2021-05-10 07:35:18,269] Trial 2202 pruned.
[I 2021-05-10 07:35:18,808] Trial 2203 pruned.
[I 2021-05-10 07:35:19,107] Trial 2204 pruned.
[I 2021-05-10 07:35:19,663] Trial 2205 pruned.
[I 2021-05-10 07:35:20,204] Trial 2206 pruned.
[I 2021-05-10 07:35:20,754] Trial 2207 pruned.
[I 2021-05-10 07:35:21,309] Trial 2208 pruned.
[I 2021-05-10 07:35:21,860] Trial 2209 pruned.
[I 2021-05-10 07:35:22,409] Trial 2210 pruned.
[I 2021-05-10 07:35:22,968] Trial 2211 pruned.
[I 2021-05-10 07:35:23,516] Trial 2212 pruned.
[I 2021-05-10 07:35:24,060] Trial 2213 pruned.
[I 2021-05-10 07:35:24,612] Trial 2214 pruned.
[I 2021-05-10 07:35:24,784] Trial 2215 pruned.
[I 2021-05-10 07:35:25,335] Trial 2216 pruned.
[I 2021-05-10 07:35:25,878] Trial 2217 pruned.
[I 2021-05-10 07:35:26,426] Trial 2218 pruned.
[I 2021-05-10 07:35:26,967] Trial 2219 pruned.
[I 2021-05-10 07:35:27,513] Trial 2220 pruned.
[I 2021-05-10 07:35:28,059] Trial 2221 pruned.
[I 2021-05-10 07:35:28,608] Trial 2222 pruned.
[I 2021-05-10 07:35:29,153] Trial 2223 pruned.
[I 2021-05-10 07:35:29,706] Trial 2224 pruned.
[I 2021-05-10 07:35:30,257] Trial 2225 pruned.
[I 2021-05-10 07:35:30,812] Trial 2226 pruned.
[I 2021-05-10 07:35:31,105] Trial 2227 pruned.
[I 2021-05-10 07:35:31,654] Trial 2228 pruned.
[I 2021-05-10 07:35:32,210] Trial 2229 pruned.
[I 2021-05-10 07:35:32,752] Trial 2230 pruned.
[I 2021-05-10 07:35:33,304] Trial 2231 pruned.
[I 2021-05-10 07:35:33,857] Trial 2232 pruned.
[I 2021-05-10 07:35:34,408] Trial 2233 pruned.
[I 2021-05-10 07:35:34,962] Trial 2234 pruned.
[I 2021-05-10 07:35:35,511] Trial 2235 pruned.
[I 2021-05-10 07:35:36,062] Trial 2236 pruned.
[I 2021-05-10 07:35:36,600] Trial 2237 pruned.
[I 2021-05-10 07:35:37,148] Trial 2238 pruned.
[I 2021-05-10 07:35:37,699] Trial 2239 pruned.
[I 2021-05-10 07:35:38,246] Trial 2240 pruned.
[I 2021-05-10 07:35:38,800] Trial 2241 pruned.
[I 2021-05-10 07:35:39,353] Trial 2242 pruned.
[I 2021-05-10 07:35:39,522] Trial 2243 pruned.
[I 2021-05-10 07:35:40,069] Trial 2244 pruned.
[I 2021-05-10 07:35:40,619] Trial 2245 pruned.
[I 2021-05-10 07:35:41,173] Trial 2246 pruned.
[I 2021-05-10 07:35:41,712] Trial 2247 pruned.
[I 2021-05-10 07:35:42,263] Trial 2248 pruned.
[I 2021-05-10 07:35:42,816] Trial 2249 pruned.
[I 2021-05-10 07:35:43,361] Trial 2250 pruned.
[I 2021-05-10 07:35:43,912] Trial 2251 pruned.
[I 2021-05-10 07:35:44,462] Trial 2252 pruned.
[I 2021-05-10 07:35:45,012] Trial 2253 pruned.
[I 2021-05-10 07:35:45,551] Trial 2254 pruned.
[I 2021-05-10 07:35:45,851] Trial 2255 pruned.
[I 2021-05-10 07:35:46,401] Trial 2256 pruned.
[I 2021-05-10 07:35:46,945] Trial 2257 pruned.
[I 2021-05-10 07:35:47,496] Trial 2258 pruned.
[I 2021-05-10 07:35:48,052] Trial 2259 pruned.
[I 2021-05-10 07:35:48,597] Trial 2260 pruned.
[I 2021-05-10 07:35:49,152] Trial 2261 pruned.
[I 2021-05-10 07:35:49,700] Trial 2262 pruned.
[I 2021-05-10 07:35:50,253] Trial 2263 pruned.
[I 2021-05-10 07:35:50,792] Trial 2264 pruned.
[I 2021-05-10 07:35:51,343] Trial 2265 pruned.
[I 2021-05-10 07:35:51,892] Trial 2266 pruned.
[I 2021-05-10 07:36:42,442] Trial 2267 finished with value: 388.42431640625 and parameters: {'lr': 0.004562451968718337, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:36:42,996] Trial 2268 pruned.
[I 2021-05-10 07:36:43,547] Trial 2269 pruned.
[I 2021-05-10 07:36:44,090] Trial 2270 pruned.
[I 2021-05-10 07:36:44,628] Trial 2271 pruned.
[I 2021-05-10 07:36:44,799] Trial 2272 pruned.
[I 2021-05-10 07:36:45,352] Trial 2273 pruned.
[I 2021-05-10 07:36:45,897] Trial 2274 pruned.
[I 2021-05-10 07:36:46,453] Trial 2275 pruned.
[I 2021-05-10 07:36:47,006] Trial 2276 pruned.
[I 2021-05-10 07:36:47,553] Trial 2277 pruned.
[I 2021-05-10 07:36:48,104] Trial 2278 pruned.
[I 2021-05-10 07:36:48,654] Trial 2279 pruned.
[I 2021-05-10 07:36:49,203] Trial 2280 pruned.
[I 2021-05-10 07:36:49,751] Trial 2281 pruned.
[I 2021-05-10 07:36:50,305] Trial 2282 pruned.
[I 2021-05-10 07:36:50,605] Trial 2283 pruned.
[I 2021-05-10 07:36:51,150] Trial 2284 pruned.
[I 2021-05-10 07:36:51,698] Trial 2285 pruned.
[I 2021-05-10 07:36:52,247] Trial 2286 pruned.
[I 2021-05-10 07:36:52,809] Trial 2287 pruned.
[I 2021-05-10 07:36:53,358] Trial 2288 pruned.
[I 2021-05-10 07:36:53,911] Trial 2289 pruned.
[I 2021-05-10 07:36:54,463] Trial 2290 pruned.
[I 2021-05-10 07:36:55,017] Trial 2291 pruned.
[I 2021-05-10 07:36:55,571] Trial 2292 pruned.
[I 2021-05-10 07:36:56,123] Trial 2293 pruned.
[I 2021-05-10 07:36:56,663] Trial 2294 pruned.
[I 2021-05-10 07:36:57,212] Trial 2295 pruned.
[I 2021-05-10 07:36:57,761] Trial 2296 pruned.
[I 2021-05-10 07:36:58,312] Trial 2297 pruned.
[I 2021-05-10 07:36:58,847] Trial 2298 pruned.
[I 2021-05-10 07:36:59,020] Trial 2299 pruned.
[I 2021-05-10 07:36:59,570] Trial 2300 pruned.
[I 2021-05-10 07:37:00,119] Trial 2301 pruned.
[I 2021-05-10 07:37:00,668] Trial 2302 pruned.
[I 2021-05-10 07:37:01,223] Trial 2303 pruned.
[I 2021-05-10 07:37:01,769] Trial 2304 pruned.
[I 2021-05-10 07:37:02,324] Trial 2305 pruned.
[I 2021-05-10 07:37:02,876] Trial 2306 pruned.
[I 2021-05-10 07:37:03,431] Trial 2307 pruned.
[I 2021-05-10 07:37:03,976] Trial 2308 pruned.
[I 2021-05-10 07:37:04,528] Trial 2309 pruned.
[I 2021-05-10 07:37:05,080] Trial 2310 pruned.
[I 2021-05-10 07:37:05,626] Trial 2311 pruned.
[I 2021-05-10 07:37:05,926] Trial 2312 pruned.
[I 2021-05-10 07:37:06,483] Trial 2313 pruned.
[I 2021-05-10 07:37:07,038] Trial 2314 pruned.
[I 2021-05-10 07:37:07,582] Trial 2315 pruned.
[I 2021-05-10 07:37:08,134] Trial 2316 pruned.
[I 2021-05-10 07:37:08,687] Trial 2317 pruned.
[I 2021-05-10 07:37:09,236] Trial 2318 pruned.
[I 2021-05-10 07:37:09,784] Trial 2319 pruned.
[I 2021-05-10 07:37:10,338] Trial 2320 pruned.
[I 2021-05-10 07:37:10,885] Trial 2321 pruned.
[I 2021-05-10 07:37:11,439] Trial 2322 pruned.
[I 2021-05-10 07:37:11,988] Trial 2323 pruned.
[I 2021-05-10 07:38:02,466] Trial 2324 finished with value: 377.05889892578125 and parameters: {'lr': 0.0037247149812796856, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:38:03,012] Trial 2325 pruned.
[I 2021-05-10 07:38:03,566] Trial 2326 pruned.
[I 2021-05-10 07:38:04,117] Trial 2327 pruned.
[I 2021-05-10 07:38:04,285] Trial 2328 pruned.
[I 2021-05-10 07:38:04,841] Trial 2329 pruned.
[I 2021-05-10 07:38:05,395] Trial 2330 pruned.
[I 2021-05-10 07:38:05,938] Trial 2331 pruned.
[I 2021-05-10 07:38:06,487] Trial 2332 pruned.
[I 2021-05-10 07:38:07,040] Trial 2333 pruned.
[I 2021-05-10 07:38:07,591] Trial 2334 pruned.
[I 2021-05-10 07:38:08,131] Trial 2335 pruned.
[I 2021-05-10 07:38:08,680] Trial 2336 pruned.
[I 2021-05-10 07:38:09,229] Trial 2337 pruned.
[I 2021-05-10 07:38:09,776] Trial 2338 pruned.
[I 2021-05-10 07:38:10,327] Trial 2339 pruned.
[I 2021-05-10 07:38:10,628] Trial 2340 pruned.
[I 2021-05-10 07:38:12,208] Trial 2341 pruned.
[I 2021-05-10 07:38:12,757] Trial 2342 pruned.
[I 2021-05-10 07:38:13,311] Trial 2343 pruned.
[I 2021-05-10 07:39:04,611] Trial 2344 finished with value: 372.6285705566406 and parameters: {'lr': 0.0038822782794517914, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:39:05,160] Trial 2345 pruned.
[I 2021-05-10 07:39:05,701] Trial 2346 pruned.
[I 2021-05-10 07:39:06,283] Trial 2347 pruned.
[I 2021-05-10 07:39:06,834] Trial 2348 pruned.
[I 2021-05-10 07:39:07,379] Trial 2349 pruned.
[I 2021-05-10 07:39:07,931] Trial 2350 pruned.
[I 2021-05-10 07:39:08,483] Trial 2351 pruned.
[I 2021-05-10 07:39:09,028] Trial 2352 pruned.
[I 2021-05-10 07:39:09,576] Trial 2353 pruned.
[I 2021-05-10 07:39:10,133] Trial 2354 pruned.
[I 2021-05-10 07:39:10,683] Trial 2355 pruned.
[I 2021-05-10 07:39:11,231] Trial 2356 pruned.
[I 2021-05-10 07:39:11,403] Trial 2357 pruned.
[I 2021-05-10 07:39:11,957] Trial 2358 pruned.
[I 2021-05-10 07:39:12,508] Trial 2359 pruned.
[I 2021-05-10 07:39:13,050] Trial 2360 pruned.
[I 2021-05-10 07:39:13,600] Trial 2361 pruned.
[I 2021-05-10 07:39:14,152] Trial 2362 pruned.
[I 2021-05-10 07:39:14,699] Trial 2363 pruned.
[I 2021-05-10 07:39:15,251] Trial 2364 pruned.
[I 2021-05-10 07:39:15,801] Trial 2365 pruned.
[I 2021-05-10 07:39:16,345] Trial 2366 pruned.
[I 2021-05-10 07:39:16,897] Trial 2367 pruned.
[I 2021-05-10 07:39:17,451] Trial 2368 pruned.
[I 2021-05-10 07:39:17,752] Trial 2369 pruned.
[I 2021-05-10 07:39:18,312] Trial 2370 pruned.
[I 2021-05-10 07:39:18,865] Trial 2371 pruned.
[I 2021-05-10 07:39:19,419] Trial 2372 pruned.
[I 2021-05-10 07:39:19,963] Trial 2373 pruned.
[I 2021-05-10 07:39:20,527] Trial 2374 pruned.
[I 2021-05-10 07:39:21,079] Trial 2375 pruned.
[I 2021-05-10 07:39:21,621] Trial 2376 pruned.
[I 2021-05-10 07:39:22,172] Trial 2377 pruned.
[I 2021-05-10 07:39:22,727] Trial 2378 pruned.
[I 2021-05-10 07:39:23,283] Trial 2379 pruned.
[I 2021-05-10 07:39:23,827] Trial 2380 pruned.
[I 2021-05-10 07:39:24,372] Trial 2381 pruned.
[I 2021-05-10 07:39:24,928] Trial 2382 pruned.
[I 2021-05-10 07:39:25,470] Trial 2383 pruned.
[I 2021-05-10 07:39:26,023] Trial 2384 pruned.
[I 2021-05-10 07:39:26,197] Trial 2385 pruned.
[I 2021-05-10 07:39:27,774] Trial 2386 pruned.
[I 2021-05-10 07:39:28,325] Trial 2387 pruned.
[I 2021-05-10 07:39:28,878] Trial 2388 pruned.
[I 2021-05-10 07:39:29,431] Trial 2389 pruned.
[I 2021-05-10 07:39:29,972] Trial 2390 pruned.
[I 2021-05-10 07:39:30,525] Trial 2391 pruned.
[I 2021-05-10 07:39:31,081] Trial 2392 pruned.
[I 2021-05-10 07:39:31,622] Trial 2393 pruned.
[I 2021-05-10 07:39:32,180] Trial 2394 pruned.
[I 2021-05-10 07:39:32,734] Trial 2395 pruned.
[I 2021-05-10 07:39:33,292] Trial 2396 pruned.
[I 2021-05-10 07:39:33,589] Trial 2397 pruned.
[I 2021-05-10 07:39:34,143] Trial 2398 pruned.
[I 2021-05-10 07:39:34,698] Trial 2399 pruned.
[I 2021-05-10 07:39:35,239] Trial 2400 pruned.
[I 2021-05-10 07:39:35,795] Trial 2401 pruned.
[I 2021-05-10 07:39:36,348] Trial 2402 pruned.
[I 2021-05-10 07:39:36,891] Trial 2403 pruned.
[I 2021-05-10 07:39:37,448] Trial 2404 pruned.
[I 2021-05-10 07:39:38,004] Trial 2405 pruned.
[I 2021-05-10 07:39:38,555] Trial 2406 pruned.
[I 2021-05-10 07:39:39,087] Trial 2407 pruned.
[I 2021-05-10 07:40:30,414] Trial 2408 finished with value: 376.510986328125 and parameters: {'lr': 0.003033836716734779, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:40:30,969] Trial 2409 pruned.
[I 2021-05-10 07:40:31,518] Trial 2410 pruned.
[I 2021-05-10 07:40:32,079] Trial 2411 pruned.
[I 2021-05-10 07:40:32,633] Trial 2412 pruned.
[I 2021-05-10 07:40:32,808] Trial 2413 pruned.
[I 2021-05-10 07:40:33,357] Trial 2414 pruned.
[I 2021-05-10 07:40:33,914] Trial 2415 pruned.
[I 2021-05-10 07:40:34,465] Trial 2416 pruned.
[I 2021-05-10 07:40:35,009] Trial 2417 pruned.
[I 2021-05-10 07:40:35,559] Trial 2418 pruned.
[I 2021-05-10 07:40:36,112] Trial 2419 pruned.
[I 2021-05-10 07:40:36,654] Trial 2420 pruned.
[I 2021-05-10 07:40:37,213] Trial 2421 pruned.
[I 2021-05-10 07:41:28,451] Trial 2422 finished with value: 380.41326904296875 and parameters: {'lr': 0.004738148289830671, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:41:29,006] Trial 2423 pruned.
[I 2021-05-10 07:41:29,549] Trial 2424 pruned.
[I 2021-05-10 07:41:30,104] Trial 2425 pruned.
[I 2021-05-10 07:41:30,653] Trial 2426 pruned.
[I 2021-05-10 07:41:31,198] Trial 2427 pruned.
[I 2021-05-10 07:41:31,502] Trial 2428 pruned.
[I 2021-05-10 07:41:32,057] Trial 2429 pruned.
[I 2021-05-10 07:41:32,606] Trial 2430 pruned.
[I 2021-05-10 07:41:33,164] Trial 2431 pruned.
[I 2021-05-10 07:41:33,719] Trial 2432 pruned.
[I 2021-05-10 07:41:34,279] Trial 2433 pruned.
[I 2021-05-10 07:41:34,821] Trial 2434 pruned.
[I 2021-05-10 07:41:35,375] Trial 2435 pruned.
[I 2021-05-10 07:41:35,928] Trial 2436 pruned.
[I 2021-05-10 07:41:36,476] Trial 2437 pruned.
[I 2021-05-10 07:41:37,032] Trial 2438 pruned.
[I 2021-05-10 07:41:37,583] Trial 2439 pruned.
[I 2021-05-10 07:41:38,140] Trial 2440 pruned.
[I 2021-05-10 07:41:38,315] Trial 2441 pruned.
[I 2021-05-10 07:41:38,870] Trial 2442 pruned.
[I 2021-05-10 07:41:39,420] Trial 2443 pruned.
[I 2021-05-10 07:41:39,963] Trial 2444 pruned.
[I 2021-05-10 07:41:40,515] Trial 2445 pruned.
[I 2021-05-10 07:41:41,071] Trial 2446 pruned.
[I 2021-05-10 07:41:41,614] Trial 2447 pruned.
[I 2021-05-10 07:41:42,170] Trial 2448 pruned.
[I 2021-05-10 07:41:42,725] Trial 2449 pruned.
[I 2021-05-10 07:41:43,277] Trial 2450 pruned.
[I 2021-05-10 07:41:43,830] Trial 2451 pruned.
[I 2021-05-10 07:41:44,382] Trial 2452 pruned.
[I 2021-05-10 07:41:44,683] Trial 2453 pruned.
[I 2021-05-10 07:41:45,234] Trial 2454 pruned.
[I 2021-05-10 07:41:45,791] Trial 2455 pruned.
[I 2021-05-10 07:41:46,344] Trial 2456 pruned.
[I 2021-05-10 07:41:46,892] Trial 2457 pruned.
[I 2021-05-10 07:41:47,448] Trial 2458 pruned.
[I 2021-05-10 07:41:48,004] Trial 2459 pruned.
[I 2021-05-10 07:41:48,558] Trial 2460 pruned.
[I 2021-05-10 07:41:49,095] Trial 2461 pruned.
[I 2021-05-10 07:41:49,649] Trial 2462 pruned.
[I 2021-05-10 07:41:50,206] Trial 2463 pruned.
[I 2021-05-10 07:41:50,749] Trial 2464 pruned.
[I 2021-05-10 07:41:51,301] Trial 2465 pruned.
[I 2021-05-10 07:41:51,857] Trial 2466 pruned.
[I 2021-05-10 07:41:52,411] Trial 2467 pruned.
[I 2021-05-10 07:41:52,960] Trial 2468 pruned.
[I 2021-05-10 07:41:53,135] Trial 2469 pruned.
[I 2021-05-10 07:41:53,694] Trial 2470 pruned.
[I 2021-05-10 07:41:54,242] Trial 2471 pruned.
[I 2021-05-10 07:41:54,795] Trial 2472 pruned.
[I 2021-05-10 07:41:55,354] Trial 2473 pruned.
[I 2021-05-10 07:42:45,838] Trial 2474 finished with value: 367.11328125 and parameters: {'lr': 0.0034586979582817753, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:42:46,391] Trial 2475 pruned.
[I 2021-05-10 07:42:46,940] Trial 2476 pruned.
[I 2021-05-10 07:42:47,489] Trial 2477 pruned.
[I 2021-05-10 07:42:48,034] Trial 2478 pruned.
[I 2021-05-10 07:42:48,579] Trial 2479 pruned.
[I 2021-05-10 07:42:49,126] Trial 2480 pruned.
[I 2021-05-10 07:42:49,677] Trial 2481 pruned.
[I 2021-05-10 07:42:49,975] Trial 2482 pruned.
[I 2021-05-10 07:42:50,519] Trial 2483 pruned.
[I 2021-05-10 07:42:51,072] Trial 2484 pruned.
[I 2021-05-10 07:42:51,613] Trial 2485 pruned.
[I 2021-05-10 07:42:52,161] Trial 2486 pruned.
[I 2021-05-10 07:42:52,720] Trial 2487 pruned.
[I 2021-05-10 07:42:53,270] Trial 2488 pruned.
[I 2021-05-10 07:42:53,810] Trial 2489 pruned.
[I 2021-05-10 07:42:54,365] Trial 2490 pruned.
[I 2021-05-10 07:42:54,914] Trial 2491 pruned.
[I 2021-05-10 07:42:55,459] Trial 2492 pruned.
[I 2021-05-10 07:42:55,996] Trial 2493 pruned.
[I 2021-05-10 07:42:56,540] Trial 2494 pruned.
[I 2021-05-10 07:42:57,085] Trial 2495 pruned.
[I 2021-05-10 07:42:57,616] Trial 2496 pruned.
[I 2021-05-10 07:42:58,161] Trial 2497 pruned.
[I 2021-05-10 07:42:58,334] Trial 2498 pruned.
[I 2021-05-10 07:42:58,879] Trial 2499 pruned.
[I 2021-05-10 07:42:59,424] Trial 2500 pruned.
[I 2021-05-10 07:42:59,973] Trial 2501 pruned.
[I 2021-05-10 07:43:00,520] Trial 2502 pruned.
[I 2021-05-10 07:43:02,081] Trial 2503 pruned.
[I 2021-05-10 07:43:02,624] Trial 2504 pruned.
[I 2021-05-10 07:43:03,169] Trial 2505 pruned.
[I 2021-05-10 07:43:03,719] Trial 2506 pruned.
[I 2021-05-10 07:43:04,268] Trial 2507 pruned.
[I 2021-05-10 07:43:04,811] Trial 2508 pruned.
[I 2021-05-10 07:43:04,923] Trial 2509 pruned.
[I 2021-05-10 07:43:05,468] Trial 2510 pruned.
[I 2021-05-10 07:43:05,764] Trial 2511 pruned.
[I 2021-05-10 07:43:06,310] Trial 2512 pruned.
[I 2021-05-10 07:43:06,855] Trial 2513 pruned.
[I 2021-05-10 07:43:07,399] Trial 2514 pruned.
[I 2021-05-10 07:43:07,947] Trial 2515 pruned.
[I 2021-05-10 07:43:08,493] Trial 2516 pruned.
[I 2021-05-10 07:43:09,016] Trial 2517 pruned.
[I 2021-05-10 07:43:09,567] Trial 2518 pruned.
[I 2021-05-10 07:43:10,119] Trial 2519 pruned.
[I 2021-05-10 07:43:10,670] Trial 2520 pruned.
[I 2021-05-10 07:43:11,220] Trial 2521 pruned.
[I 2021-05-10 07:43:11,760] Trial 2522 pruned.
[I 2021-05-10 07:43:12,306] Trial 2523 pruned.
[I 2021-05-10 07:43:12,854] Trial 2524 pruned.
[I 2021-05-10 07:43:13,405] Trial 2525 pruned.
[I 2021-05-10 07:43:13,950] Trial 2526 pruned.
[I 2021-05-10 07:43:14,496] Trial 2527 pruned.
[I 2021-05-10 07:43:15,040] Trial 2528 pruned.
[I 2021-05-10 07:43:15,214] Trial 2529 pruned.
[I 2021-05-10 07:43:15,760] Trial 2530 pruned.
[I 2021-05-10 07:43:16,303] Trial 2531 pruned.
[I 2021-05-10 07:43:16,847] Trial 2532 pruned.
[I 2021-05-10 07:43:17,396] Trial 2533 pruned.
[I 2021-05-10 07:43:17,945] Trial 2534 pruned.
[I 2021-05-10 07:43:18,497] Trial 2535 pruned.
[I 2021-05-10 07:43:19,046] Trial 2536 pruned.
[I 2021-05-10 07:43:19,590] Trial 2537 pruned.
[I 2021-05-10 07:43:20,134] Trial 2538 pruned.
[I 2021-05-10 07:43:20,429] Trial 2539 pruned.
[I 2021-05-10 07:43:20,978] Trial 2540 pruned.
[I 2021-05-10 07:43:21,090] Trial 2541 pruned.
[I 2021-05-10 07:43:21,632] Trial 2542 pruned.
[I 2021-05-10 07:43:22,176] Trial 2543 pruned.
[I 2021-05-10 07:43:22,723] Trial 2544 pruned.
[I 2021-05-10 07:43:24,307] Trial 2545 pruned.
[I 2021-05-10 07:43:24,853] Trial 2546 pruned.
[I 2021-05-10 07:43:25,397] Trial 2547 pruned.
[I 2021-05-10 07:43:25,954] Trial 2548 pruned.
[I 2021-05-10 07:43:26,496] Trial 2549 pruned.
[I 2021-05-10 07:43:27,044] Trial 2550 pruned.
[I 2021-05-10 07:43:27,602] Trial 2551 pruned.
[I 2021-05-10 07:43:28,159] Trial 2552 pruned.
[I 2021-05-10 07:43:28,708] Trial 2553 pruned.
[I 2021-05-10 07:43:29,262] Trial 2554 pruned.
[I 2021-05-10 07:44:20,554] Trial 2555 finished with value: 379.15399169921875 and parameters: {'lr': 0.004604750711964362, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:44:20,727] Trial 2556 pruned.
[I 2021-05-10 07:44:21,284] Trial 2557 pruned.
[I 2021-05-10 07:44:21,839] Trial 2558 pruned.
[I 2021-05-10 07:44:22,384] Trial 2559 pruned.
[I 2021-05-10 07:44:22,937] Trial 2560 pruned.
[I 2021-05-10 07:44:23,491] Trial 2561 pruned.
[I 2021-05-10 07:44:24,038] Trial 2562 pruned.
[I 2021-05-10 07:44:24,598] Trial 2563 pruned.
[I 2021-05-10 07:44:25,154] Trial 2564 pruned.
[I 2021-05-10 07:44:25,706] Trial 2565 pruned.
[I 2021-05-10 07:44:26,264] Trial 2566 pruned.
[I 2021-05-10 07:44:26,377] Trial 2567 pruned.
[I 2021-05-10 07:44:27,932] Trial 2568 pruned.
[I 2021-05-10 07:44:28,487] Trial 2569 pruned.
[I 2021-05-10 07:44:29,035] Trial 2570 pruned.
[I 2021-05-10 07:44:29,330] Trial 2571 pruned.
[I 2021-05-10 07:44:29,887] Trial 2572 pruned.
[I 2021-05-10 07:44:30,441] Trial 2573 pruned.
[I 2021-05-10 07:44:30,979] Trial 2574 pruned.
[I 2021-05-10 07:44:31,536] Trial 2575 pruned.
[I 2021-05-10 07:44:32,092] Trial 2576 pruned.
[I 2021-05-10 07:44:32,641] Trial 2577 pruned.
[I 2021-05-10 07:44:33,185] Trial 2578 pruned.
[I 2021-05-10 07:44:33,741] Trial 2579 pruned.
[I 2021-05-10 07:44:34,297] Trial 2580 pruned.
[I 2021-05-10 07:44:34,852] Trial 2581 pruned.
[I 2021-05-10 07:44:35,407] Trial 2582 pruned.
[I 2021-05-10 07:44:35,952] Trial 2583 pruned.
[I 2021-05-10 07:44:36,509] Trial 2584 pruned.
[I 2021-05-10 07:44:36,687] Trial 2585 pruned.
[I 2021-05-10 07:44:37,231] Trial 2586 pruned.
[I 2021-05-10 07:44:37,787] Trial 2587 pruned.
[I 2021-05-10 07:44:38,347] Trial 2588 pruned.
[I 2021-05-10 07:44:38,897] Trial 2589 pruned.
[I 2021-05-10 07:44:39,456] Trial 2590 pruned.
[I 2021-05-10 07:44:40,014] Trial 2591 pruned.
[I 2021-05-10 07:44:40,558] Trial 2592 pruned.
[I 2021-05-10 07:44:41,113] Trial 2593 pruned.
[I 2021-05-10 07:44:41,666] Trial 2594 pruned.
[I 2021-05-10 07:44:42,212] Trial 2595 pruned.
[I 2021-05-10 07:44:42,764] Trial 2596 pruned.
[I 2021-05-10 07:44:42,877] Trial 2597 pruned.
[I 2021-05-10 07:44:43,436] Trial 2598 pruned.
[I 2021-05-10 07:44:43,751] Trial 2599 pruned.
[I 2021-05-10 07:44:44,314] Trial 2600 pruned.
[I 2021-05-10 07:44:44,862] Trial 2601 pruned.
[I 2021-05-10 07:44:45,413] Trial 2602 pruned.
[I 2021-05-10 07:44:45,967] Trial 2603 pruned.
[I 2021-05-10 07:44:46,525] Trial 2604 pruned.
[I 2021-05-10 07:44:47,076] Trial 2605 pruned.
[I 2021-05-10 07:44:47,634] Trial 2606 pruned.
[I 2021-05-10 07:44:48,185] Trial 2607 pruned.
[I 2021-05-10 07:44:48,748] Trial 2608 pruned.
[I 2021-05-10 07:44:49,307] Trial 2609 pruned.
[I 2021-05-10 07:44:49,853] Trial 2610 pruned.
[I 2021-05-10 07:44:50,407] Trial 2611 pruned.
[I 2021-05-10 07:44:50,962] Trial 2612 pruned.
[I 2021-05-10 07:44:51,513] Trial 2613 pruned.
[I 2021-05-10 07:44:51,691] Trial 2614 pruned.
[I 2021-05-10 07:44:52,246] Trial 2615 pruned.
[I 2021-05-10 07:44:52,802] Trial 2616 pruned.
[I 2021-05-10 07:44:53,363] Trial 2617 pruned.
[I 2021-05-10 07:44:53,920] Trial 2618 pruned.
[I 2021-05-10 07:44:54,475] Trial 2619 pruned.
[I 2021-05-10 07:44:55,034] Trial 2620 pruned.
[I 2021-05-10 07:44:55,592] Trial 2621 pruned.
[I 2021-05-10 07:44:56,139] Trial 2622 pruned.
[I 2021-05-10 07:44:56,698] Trial 2623 pruned.
[I 2021-05-10 07:44:57,242] Trial 2624 pruned.
[I 2021-05-10 07:44:57,356] Trial 2625 pruned.
[I 2021-05-10 07:44:57,915] Trial 2626 pruned.
[I 2021-05-10 07:44:58,461] Trial 2627 pruned.
[I 2021-05-10 07:44:59,013] Trial 2628 pruned.
[I 2021-05-10 07:44:59,568] Trial 2629 pruned.
[I 2021-05-10 07:45:00,128] Trial 2630 pruned.
[I 2021-05-10 07:45:00,429] Trial 2631 pruned.
[I 2021-05-10 07:45:00,988] Trial 2632 pruned.
[I 2021-05-10 07:45:01,546] Trial 2633 pruned.
[I 2021-05-10 07:45:02,088] Trial 2634 pruned.
[I 2021-05-10 07:45:02,644] Trial 2635 pruned.
[I 2021-05-10 07:45:03,203] Trial 2636 pruned.
[I 2021-05-10 07:45:03,755] Trial 2637 pruned.
[I 2021-05-10 07:45:04,311] Trial 2638 pruned.
[I 2021-05-10 07:45:04,865] Trial 2639 pruned.
[I 2021-05-10 07:45:06,425] Trial 2640 pruned.
[I 2021-05-10 07:45:06,982] Trial 2641 pruned.
[I 2021-05-10 07:45:07,535] Trial 2642 pruned.
[I 2021-05-10 07:45:08,089] Trial 2643 pruned.
[I 2021-05-10 07:45:08,647] Trial 2644 pruned.
[I 2021-05-10 07:45:09,202] Trial 2645 pruned.
[I 2021-05-10 07:45:09,377] Trial 2646 pruned.
[I 2021-05-10 07:45:09,931] Trial 2647 pruned.
[I 2021-05-10 07:45:10,492] Trial 2648 pruned.
[I 2021-05-10 07:45:12,049] Trial 2649 pruned.
[I 2021-05-10 07:45:12,604] Trial 2650 pruned.
[I 2021-05-10 07:45:13,154] Trial 2651 pruned.
[I 2021-05-10 07:45:13,705] Trial 2652 pruned.
[I 2021-05-10 07:45:14,252] Trial 2653 pruned.
[I 2021-05-10 07:45:14,808] Trial 2654 pruned.
[I 2021-05-10 07:45:14,921] Trial 2655 pruned.
[I 2021-05-10 07:45:15,477] Trial 2656 pruned.
[I 2021-05-10 07:45:16,036] Trial 2657 pruned.
[I 2021-05-10 07:45:16,577] Trial 2658 pruned.
[I 2021-05-10 07:45:17,132] Trial 2659 pruned.
[I 2021-05-10 07:45:17,691] Trial 2660 pruned.
[I 2021-05-10 07:45:17,987] Trial 2661 pruned.
[I 2021-05-10 07:45:18,543] Trial 2662 pruned.
[I 2021-05-10 07:45:19,102] Trial 2663 pruned.
[I 2021-05-10 07:45:19,652] Trial 2664 pruned.
[I 2021-05-10 07:45:21,230] Trial 2665 pruned.
[I 2021-05-10 07:45:21,789] Trial 2666 pruned.
[I 2021-05-10 07:45:22,337] Trial 2667 pruned.
[I 2021-05-10 07:45:22,895] Trial 2668 pruned.
[I 2021-05-10 07:45:23,457] Trial 2669 pruned.
[I 2021-05-10 07:45:24,006] Trial 2670 pruned.
[I 2021-05-10 07:45:24,567] Trial 2671 pruned.
[I 2021-05-10 07:45:25,126] Trial 2672 pruned.
[I 2021-05-10 07:45:25,305] Trial 2673 pruned.
[I 2021-05-10 07:45:25,859] Trial 2674 pruned.
[I 2021-05-10 07:45:26,414] Trial 2675 pruned.
[I 2021-05-10 07:45:26,961] Trial 2676 pruned.
[I 2021-05-10 07:45:27,518] Trial 2677 pruned.
[I 2021-05-10 07:45:28,061] Trial 2678 pruned.
[I 2021-05-10 07:45:28,607] Trial 2679 pruned.
[I 2021-05-10 07:45:29,165] Trial 2680 pruned.
[I 2021-05-10 07:45:29,717] Trial 2681 pruned.
[I 2021-05-10 07:45:30,265] Trial 2682 pruned.
[I 2021-05-10 07:45:30,826] Trial 2683 pruned.
[I 2021-05-10 07:45:30,943] Trial 2684 pruned.
[I 2021-05-10 07:45:31,496] Trial 2685 pruned.
[I 2021-05-10 07:45:31,797] Trial 2686 pruned.
[I 2021-05-10 07:45:32,358] Trial 2687 pruned.
[I 2021-05-10 07:45:32,912] Trial 2688 pruned.
[I 2021-05-10 07:45:33,472] Trial 2689 pruned.
[I 2021-05-10 07:45:34,030] Trial 2690 pruned.
[I 2021-05-10 07:45:34,585] Trial 2691 pruned.
[I 2021-05-10 07:45:35,142] Trial 2692 pruned.
[I 2021-05-10 07:45:35,702] Trial 2693 pruned.
[I 2021-05-10 07:45:36,256] Trial 2694 pruned.
[I 2021-05-10 07:45:36,815] Trial 2695 pruned.
[I 2021-05-10 07:45:37,371] Trial 2696 pruned.
[I 2021-05-10 07:45:37,925] Trial 2697 pruned.
[I 2021-05-10 07:45:39,507] Trial 2698 pruned.
[I 2021-05-10 07:45:40,065] Trial 2699 pruned.
[I 2021-05-10 07:45:40,612] Trial 2700 pruned.
[I 2021-05-10 07:45:41,167] Trial 2701 pruned.
[I 2021-05-10 07:45:41,728] Trial 2702 pruned.
[I 2021-05-10 07:45:41,905] Trial 2703 pruned.
[I 2021-05-10 07:45:42,465] Trial 2704 pruned.
[I 2021-05-10 07:45:43,023] Trial 2705 pruned.
[I 2021-05-10 07:45:43,562] Trial 2706 pruned.
[I 2021-05-10 07:45:44,111] Trial 2707 pruned.
[I 2021-05-10 07:45:44,672] Trial 2708 pruned.
[I 2021-05-10 07:45:45,222] Trial 2709 pruned.
[I 2021-05-10 07:45:45,779] Trial 2710 pruned.
[I 2021-05-10 07:45:46,337] Trial 2711 pruned.
[I 2021-05-10 07:45:46,881] Trial 2712 pruned.
[I 2021-05-10 07:45:46,997] Trial 2713 pruned.
[I 2021-05-10 07:45:47,555] Trial 2714 pruned.
[I 2021-05-10 07:45:48,101] Trial 2715 pruned.
[I 2021-05-10 07:45:48,406] Trial 2716 pruned.
[I 2021-05-10 07:45:48,965] Trial 2717 pruned.
[I 2021-05-10 07:45:49,516] Trial 2718 pruned.
[I 2021-05-10 07:45:50,075] Trial 2719 pruned.
[I 2021-05-10 07:45:50,632] Trial 2720 pruned.
[I 2021-05-10 07:45:51,185] Trial 2721 pruned.
[I 2021-05-10 07:45:51,745] Trial 2722 pruned.
[I 2021-05-10 07:45:52,302] Trial 2723 pruned.
[I 2021-05-10 07:45:53,866] Trial 2724 pruned.
[I 2021-05-10 07:45:54,424] Trial 2725 pruned.
[I 2021-05-10 07:45:54,987] Trial 2726 pruned.
[I 2021-05-10 07:45:55,543] Trial 2727 pruned.
[I 2021-05-10 07:45:56,102] Trial 2728 pruned.
[I 2021-05-10 07:45:56,660] Trial 2729 pruned.
[I 2021-05-10 07:45:57,219] Trial 2730 pruned.
[I 2021-05-10 07:45:57,400] Trial 2731 pruned.
[I 2021-05-10 07:45:57,960] Trial 2732 pruned.
[I 2021-05-10 07:45:58,505] Trial 2733 pruned.
[I 2021-05-10 07:45:59,057] Trial 2734 pruned.
[I 2021-05-10 07:45:59,604] Trial 2735 pruned.
[I 2021-05-10 07:46:00,158] Trial 2736 pruned.
[I 2021-05-10 07:46:00,720] Trial 2737 pruned.
[I 2021-05-10 07:46:01,274] Trial 2738 pruned.
[I 2021-05-10 07:46:01,811] Trial 2739 pruned.
[I 2021-05-10 07:46:02,373] Trial 2740 pruned.
[I 2021-05-10 07:46:02,932] Trial 2741 pruned.
[I 2021-05-10 07:46:03,048] Trial 2742 pruned.
[I 2021-05-10 07:46:03,604] Trial 2743 pruned.
[I 2021-05-10 07:46:03,911] Trial 2744 pruned.
[I 2021-05-10 07:46:04,460] Trial 2745 pruned.
[I 2021-05-10 07:46:05,023] Trial 2746 pruned.
[I 2021-05-10 07:46:05,581] Trial 2747 pruned.
[I 2021-05-10 07:46:06,135] Trial 2748 pruned.
[I 2021-05-10 07:46:06,711] Trial 2749 pruned.
[I 2021-05-10 07:46:07,272] Trial 2750 pruned.
[I 2021-05-10 07:46:07,819] Trial 2751 pruned.
[I 2021-05-10 07:46:08,378] Trial 2752 pruned.
[I 2021-05-10 07:46:08,938] Trial 2753 pruned.
[I 2021-05-10 07:46:09,490] Trial 2754 pruned.
[I 2021-05-10 07:46:10,045] Trial 2755 pruned.
[I 2021-05-10 07:46:11,622] Trial 2756 pruned.
[I 2021-05-10 07:46:12,169] Trial 2757 pruned.
[I 2021-05-10 07:46:12,727] Trial 2758 pruned.
[I 2021-05-10 07:46:13,285] Trial 2759 pruned.
[I 2021-05-10 07:46:13,460] Trial 2760 pruned.
[I 2021-05-10 07:46:14,020] Trial 2761 pruned.
[I 2021-05-10 07:46:14,569] Trial 2762 pruned.
[I 2021-05-10 07:46:15,123] Trial 2763 pruned.
[I 2021-05-10 07:46:15,685] Trial 2764 pruned.
[I 2021-05-10 07:46:16,239] Trial 2765 pruned.
[I 2021-05-10 07:46:16,793] Trial 2766 pruned.
[I 2021-05-10 07:46:17,346] Trial 2767 pruned.
[I 2021-05-10 07:46:17,906] Trial 2768 pruned.
[I 2021-05-10 07:46:18,455] Trial 2769 pruned.
[I 2021-05-10 07:46:19,012] Trial 2770 pruned.
[I 2021-05-10 07:46:19,130] Trial 2771 pruned.
[I 2021-05-10 07:46:19,677] Trial 2772 pruned.
[I 2021-05-10 07:46:19,985] Trial 2773 pruned.
[I 2021-05-10 07:46:20,542] Trial 2774 pruned.
[I 2021-05-10 07:46:21,093] Trial 2775 pruned.
[I 2021-05-10 07:46:21,649] Trial 2776 pruned.
[I 2021-05-10 07:46:22,208] Trial 2777 pruned.
[I 2021-05-10 07:46:22,756] Trial 2778 pruned.
[I 2021-05-10 07:46:23,319] Trial 2779 pruned.
[I 2021-05-10 07:46:23,881] Trial 2780 pruned.
[I 2021-05-10 07:46:25,436] Trial 2781 pruned.
[I 2021-05-10 07:46:25,993] Trial 2782 pruned.
[I 2021-05-10 07:46:26,553] Trial 2783 pruned.
[I 2021-05-10 07:46:27,106] Trial 2784 pruned.
[I 2021-05-10 07:46:27,663] Trial 2785 pruned.
[I 2021-05-10 07:46:28,220] Trial 2786 pruned.
[I 2021-05-10 07:46:28,774] Trial 2787 pruned.
[I 2021-05-10 07:46:29,331] Trial 2788 pruned.
[I 2021-05-10 07:46:29,511] Trial 2789 pruned.
[I 2021-05-10 07:46:30,066] Trial 2790 pruned.
[I 2021-05-10 07:46:30,623] Trial 2791 pruned.
[I 2021-05-10 07:46:31,183] Trial 2792 pruned.
[I 2021-05-10 07:46:32,740] Trial 2793 pruned.
[I 2021-05-10 07:46:33,288] Trial 2794 pruned.
[I 2021-05-10 07:46:33,850] Trial 2795 pruned.
[I 2021-05-10 07:46:34,405] Trial 2796 pruned.
[I 2021-05-10 07:46:34,965] Trial 2797 pruned.
[I 2021-05-10 07:46:35,522] Trial 2798 pruned.
[I 2021-05-10 07:46:36,071] Trial 2799 pruned.
[I 2021-05-10 07:46:36,188] Trial 2800 pruned.
[I 2021-05-10 07:46:36,749] Trial 2801 pruned.
[I 2021-05-10 07:46:37,298] Trial 2802 pruned.
[I 2021-05-10 07:46:37,860] Trial 2803 pruned.
[I 2021-05-10 07:46:38,418] Trial 2804 pruned.
[I 2021-05-10 07:46:38,968] Trial 2805 pruned.
[I 2021-05-10 07:46:39,276] Trial 2806 pruned.
[I 2021-05-10 07:46:39,837] Trial 2807 pruned.
[I 2021-05-10 07:46:40,393] Trial 2808 pruned.
[I 2021-05-10 07:46:40,953] Trial 2809 pruned.
[I 2021-05-10 07:46:41,514] Trial 2810 pruned.
[I 2021-05-10 07:46:42,068] Trial 2811 pruned.
[I 2021-05-10 07:46:42,625] Trial 2812 pruned.
[I 2021-05-10 07:46:43,185] Trial 2813 pruned.
[I 2021-05-10 07:46:43,737] Trial 2814 pruned.
[I 2021-05-10 07:46:44,296] Trial 2815 pruned.
[I 2021-05-10 07:47:34,638] Trial 2816 finished with value: 449.55841064453125 and parameters: {'lr': 0.004657957231828999, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:47:35,179] Trial 2817 pruned.
[I 2021-05-10 07:47:35,742] Trial 2818 pruned.
[I 2021-05-10 07:47:36,302] Trial 2819 pruned.
[I 2021-05-10 07:47:36,480] Trial 2820 pruned.
[I 2021-05-10 07:47:37,034] Trial 2821 pruned.
[I 2021-05-10 07:47:37,592] Trial 2822 pruned.
[I 2021-05-10 07:47:38,150] Trial 2823 pruned.
[I 2021-05-10 07:47:38,712] Trial 2824 pruned.
[I 2021-05-10 07:47:39,273] Trial 2825 pruned.
[I 2021-05-10 07:47:39,821] Trial 2826 pruned.
[I 2021-05-10 07:47:40,380] Trial 2827 pruned.
[I 2021-05-10 07:47:40,942] Trial 2828 pruned.
[I 2021-05-10 07:47:41,060] Trial 2829 pruned.
[I 2021-05-10 07:47:41,617] Trial 2830 pruned.
[I 2021-05-10 07:47:42,175] Trial 2831 pruned.
[I 2021-05-10 07:47:42,723] Trial 2832 pruned.
[I 2021-05-10 07:47:43,283] Trial 2833 pruned.
[I 2021-05-10 07:47:43,843] Trial 2834 pruned.
[I 2021-05-10 07:47:44,149] Trial 2835 pruned.
[I 2021-05-10 07:47:44,705] Trial 2836 pruned.
[I 2021-05-10 07:47:45,269] Trial 2837 pruned.
[I 2021-05-10 07:47:45,826] Trial 2838 pruned.
[I 2021-05-10 07:47:46,386] Trial 2839 pruned.
[I 2021-05-10 07:47:46,949] Trial 2840 pruned.
[I 2021-05-10 07:47:47,507] Trial 2841 pruned.
[I 2021-05-10 07:47:48,068] Trial 2842 pruned.
[I 2021-05-10 07:47:48,606] Trial 2843 pruned.
[I 2021-05-10 07:47:49,155] Trial 2844 pruned.
[I 2021-05-10 07:47:49,716] Trial 2845 pruned.
[I 2021-05-10 07:47:50,272] Trial 2846 pruned.
[I 2021-05-10 07:47:50,453] Trial 2847 pruned.
[I 2021-05-10 07:47:51,015] Trial 2848 pruned.
[I 2021-05-10 07:47:51,564] Trial 2849 pruned.
[I 2021-05-10 07:47:52,116] Trial 2850 pruned.
[I 2021-05-10 07:47:52,678] Trial 2851 pruned.
[I 2021-05-10 07:47:53,245] Trial 2852 pruned.
[I 2021-05-10 07:47:53,799] Trial 2853 pruned.
[I 2021-05-10 07:47:54,361] Trial 2854 pruned.
[I 2021-05-10 07:47:54,921] Trial 2855 pruned.
[I 2021-05-10 07:47:55,472] Trial 2856 pruned.
[I 2021-05-10 07:47:56,035] Trial 2857 pruned.
[I 2021-05-10 07:47:56,591] Trial 2858 pruned.
[I 2021-05-10 07:47:56,709] Trial 2859 pruned.
[I 2021-05-10 07:47:57,274] Trial 2860 pruned.
[I 2021-05-10 07:47:57,583] Trial 2861 pruned.
[I 2021-05-10 07:47:58,137] Trial 2862 pruned.
[I 2021-05-10 07:47:58,695] Trial 2863 pruned.
[I 2021-05-10 07:48:49,991] Trial 2864 finished with value: 366.45880126953125 and parameters: {'lr': 0.0036757525364016167, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:48:50,551] Trial 2865 pruned.
[I 2021-05-10 07:48:51,108] Trial 2866 pruned.
[I 2021-05-10 07:48:51,666] Trial 2867 pruned.
[I 2021-05-10 07:48:52,230] Trial 2868 pruned.
[I 2021-05-10 07:48:52,796] Trial 2869 pruned.
[I 2021-05-10 07:48:53,363] Trial 2870 pruned.
[I 2021-05-10 07:48:53,926] Trial 2871 pruned.
[I 2021-05-10 07:48:54,498] Trial 2872 pruned.
[I 2021-05-10 07:48:55,057] Trial 2873 pruned.
[I 2021-05-10 07:48:55,619] Trial 2874 pruned.
[I 2021-05-10 07:49:47,015] Trial 2875 finished with value: 369.76727294921875 and parameters: {'lr': 0.0037980766560896263, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:49:47,577] Trial 2876 pruned.
[I 2021-05-10 07:49:48,132] Trial 2877 pruned.
[I 2021-05-10 07:49:48,315] Trial 2878 pruned.
[I 2021-05-10 07:49:48,878] Trial 2879 pruned.
[I 2021-05-10 07:49:49,438] Trial 2880 pruned.
[I 2021-05-10 07:49:51,018] Trial 2881 pruned.
[I 2021-05-10 07:49:51,578] Trial 2882 pruned.
[I 2021-05-10 07:49:52,141] Trial 2883 pruned.
[I 2021-05-10 07:49:52,706] Trial 2884 pruned.
[I 2021-05-10 07:49:53,268] Trial 2885 pruned.
[I 2021-05-10 07:49:53,831] Trial 2886 pruned.
[I 2021-05-10 07:49:54,392] Trial 2887 pruned.
[I 2021-05-10 07:49:54,956] Trial 2888 pruned.
[I 2021-05-10 07:49:55,075] Trial 2889 pruned.
[I 2021-05-10 07:49:55,637] Trial 2890 pruned.
[I 2021-05-10 07:49:56,465] Trial 2891 pruned.
[I 2021-05-10 07:49:57,030] Trial 2892 pruned.
[I 2021-05-10 07:49:57,596] Trial 2893 pruned.
[I 2021-05-10 07:49:58,158] Trial 2894 pruned.
[I 2021-05-10 07:49:58,723] Trial 2895 pruned.
[I 2021-05-10 07:49:59,285] Trial 2896 pruned.
[I 2021-05-10 07:49:59,849] Trial 2897 pruned.
[I 2021-05-10 07:50:00,412] Trial 2898 pruned.
[I 2021-05-10 07:50:00,954] Trial 2899 pruned.
[I 2021-05-10 07:50:01,513] Trial 2900 pruned.
[I 2021-05-10 07:50:02,078] Trial 2901 pruned.
[I 2021-05-10 07:50:53,395] Trial 2902 finished with value: 373.48193359375 and parameters: {'lr': 0.003023858855109976, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:50:53,962] Trial 2903 pruned.
[I 2021-05-10 07:50:54,525] Trial 2904 pruned.
[I 2021-05-10 07:50:55,085] Trial 2905 pruned.
[I 2021-05-10 07:50:55,649] Trial 2906 pruned.
[I 2021-05-10 07:50:56,211] Trial 2907 pruned.
[I 2021-05-10 07:50:56,772] Trial 2908 pruned.
[I 2021-05-10 07:50:56,954] Trial 2909 pruned.
[I 2021-05-10 07:50:57,521] Trial 2910 pruned.
[I 2021-05-10 07:50:58,082] Trial 2911 pruned.
[I 2021-05-10 07:50:58,641] Trial 2912 pruned.
[I 2021-05-10 07:50:59,204] Trial 2913 pruned.
[I 2021-05-10 07:50:59,768] Trial 2914 pruned.
[I 2021-05-10 07:51:00,330] Trial 2915 pruned.
[I 2021-05-10 07:51:00,891] Trial 2916 pruned.
[I 2021-05-10 07:51:01,455] Trial 2917 pruned.
[I 2021-05-10 07:51:02,014] Trial 2918 pruned.
[I 2021-05-10 07:51:02,135] Trial 2919 pruned.
[I 2021-05-10 07:51:02,703] Trial 2920 pruned.
[I 2021-05-10 07:51:03,009] Trial 2921 pruned.
[I 2021-05-10 07:51:03,570] Trial 2922 pruned.
[I 2021-05-10 07:51:04,130] Trial 2923 pruned.
[I 2021-05-10 07:51:04,694] Trial 2924 pruned.
[I 2021-05-10 07:51:05,248] Trial 2925 pruned.
[I 2021-05-10 07:51:05,813] Trial 2926 pruned.
[I 2021-05-10 07:51:06,380] Trial 2927 pruned.
[I 2021-05-10 07:51:06,940] Trial 2928 pruned.
[I 2021-05-10 07:51:07,494] Trial 2929 pruned.
[I 2021-05-10 07:51:08,057] Trial 2930 pruned.
[I 2021-05-10 07:51:08,604] Trial 2931 pruned.
[I 2021-05-10 07:51:09,165] Trial 2932 pruned.
[I 2021-05-10 07:51:09,727] Trial 2933 pruned.
[I 2021-05-10 07:51:10,295] Trial 2934 pruned.
[I 2021-05-10 07:51:10,859] Trial 2935 pruned.
[I 2021-05-10 07:51:11,421] Trial 2936 pruned.
[I 2021-05-10 07:51:11,604] Trial 2937 pruned.
[I 2021-05-10 07:51:12,167] Trial 2938 pruned.
[I 2021-05-10 07:51:12,731] Trial 2939 pruned.
[I 2021-05-10 07:51:13,297] Trial 2940 pruned.
[I 2021-05-10 07:51:13,863] Trial 2941 pruned.
[I 2021-05-10 07:51:14,430] Trial 2942 pruned.
[I 2021-05-10 07:51:14,992] Trial 2943 pruned.
[I 2021-05-10 07:51:15,555] Trial 2944 pruned.
[I 2021-05-10 07:51:16,116] Trial 2945 pruned.
[I 2021-05-10 07:51:16,681] Trial 2946 pruned.
[I 2021-05-10 07:51:17,247] Trial 2947 pruned.
[I 2021-05-10 07:51:18,832] Trial 2948 pruned.
[I 2021-05-10 07:51:18,954] Trial 2949 pruned.
[I 2021-05-10 07:51:19,516] Trial 2950 pruned.
[I 2021-05-10 07:51:19,827] Trial 2951 pruned.
[I 2021-05-10 07:51:20,381] Trial 2952 pruned.
[I 2021-05-10 07:51:20,946] Trial 2953 pruned.
[I 2021-05-10 07:51:21,498] Trial 2954 pruned.
[I 2021-05-10 07:51:22,062] Trial 2955 pruned.
[I 2021-05-10 07:51:22,622] Trial 2956 pruned.
[I 2021-05-10 07:51:23,187] Trial 2957 pruned.
[I 2021-05-10 07:51:23,756] Trial 2958 pruned.
[I 2021-05-10 07:51:24,321] Trial 2959 pruned.
[I 2021-05-10 07:51:24,878] Trial 2960 pruned.
[I 2021-05-10 07:51:25,440] Trial 2961 pruned.
[I 2021-05-10 07:51:26,004] Trial 2962 pruned.
[I 2021-05-10 07:51:26,566] Trial 2963 pruned.
[I 2021-05-10 07:51:27,129] Trial 2964 pruned.
[I 2021-05-10 07:51:27,694] Trial 2965 pruned.
[I 2021-05-10 07:51:28,259] Trial 2966 pruned.
[I 2021-05-10 07:51:28,444] Trial 2967 pruned.
[I 2021-05-10 07:51:29,008] Trial 2968 pruned.
[I 2021-05-10 07:51:29,575] Trial 2969 pruned.
[I 2021-05-10 07:51:30,138] Trial 2970 pruned.
[I 2021-05-10 07:51:30,702] Trial 2971 pruned.
[I 2021-05-10 07:51:31,265] Trial 2972 pruned.
[I 2021-05-10 07:51:31,829] Trial 2973 pruned.
[I 2021-05-10 07:51:32,394] Trial 2974 pruned.
[I 2021-05-10 07:51:32,958] Trial 2975 pruned.
[I 2021-05-10 07:51:33,519] Trial 2976 pruned.
[I 2021-05-10 07:51:33,639] Trial 2977 pruned.
[I 2021-05-10 07:51:34,202] Trial 2978 pruned.
[I 2021-05-10 07:51:34,765] Trial 2979 pruned.
[I 2021-05-10 07:51:35,317] Trial 2980 pruned.
[I 2021-05-10 07:52:25,074] Trial 2981 pruned.
[I 2021-05-10 07:52:25,623] Trial 2982 pruned.
[I 2021-05-10 07:52:25,936] Trial 2983 pruned.
[I 2021-05-10 07:52:26,500] Trial 2984 pruned.
[I 2021-05-10 07:52:27,056] Trial 2985 pruned.
[I 2021-05-10 07:52:27,619] Trial 2986 pruned.
[I 2021-05-10 07:52:28,183] Trial 2987 pruned.
[I 2021-05-10 07:52:28,744] Trial 2988 pruned.
[I 2021-05-10 07:52:29,310] Trial 2989 pruned.
[I 2021-05-10 07:52:29,875] Trial 2990 pruned.
[I 2021-05-10 07:52:30,450] Trial 2991 pruned.
[I 2021-05-10 07:52:31,015] Trial 2992 pruned.
[I 2021-05-10 07:52:31,580] Trial 2993 pruned.
[I 2021-05-10 07:52:32,145] Trial 2994 pruned.
[I 2021-05-10 07:52:32,329] Trial 2995 pruned.
[I 2021-05-10 07:52:32,892] Trial 2996 pruned.
[I 2021-05-10 07:52:33,458] Trial 2997 pruned.
[I 2021-05-10 07:52:34,023] Trial 2998 pruned.
[I 2021-05-10 07:52:34,589] Trial 2999 pruned.
[I 2021-05-10 07:52:35,174] Trial 3000 pruned.
[I 2021-05-10 07:52:35,757] Trial 3001 pruned.
[I 2021-05-10 07:52:36,326] Trial 3002 pruned.
[I 2021-05-10 07:52:36,891] Trial 3003 pruned.
[I 2021-05-10 07:52:37,457] Trial 3004 pruned.
[I 2021-05-10 07:52:38,023] Trial 3005 pruned.
[I 2021-05-10 07:52:38,575] Trial 3006 pruned.
[I 2021-05-10 07:52:38,882] Trial 3007 pruned.
[I 2021-05-10 07:52:39,448] Trial 3008 pruned.
[I 2021-05-10 07:52:39,569] Trial 3009 pruned.
[I 2021-05-10 07:52:40,131] Trial 3010 pruned.
[I 2021-05-10 07:52:40,698] Trial 3011 pruned.
[I 2021-05-10 07:52:41,263] Trial 3012 pruned.
[I 2021-05-10 07:52:41,828] Trial 3013 pruned.
[I 2021-05-10 07:52:42,382] Trial 3014 pruned.
[I 2021-05-10 07:52:42,949] Trial 3015 pruned.
[I 2021-05-10 07:52:43,515] Trial 3016 pruned.
[I 2021-05-10 07:52:44,079] Trial 3017 pruned.
[I 2021-05-10 07:52:44,640] Trial 3018 pruned.
[I 2021-05-10 07:52:45,202] Trial 3019 pruned.
[I 2021-05-10 07:52:45,767] Trial 3020 pruned.
[I 2021-05-10 07:52:46,343] Trial 3021 pruned.
[I 2021-05-10 07:52:46,908] Trial 3022 pruned.
[I 2021-05-10 07:52:47,475] Trial 3023 pruned.
[I 2021-05-10 07:52:47,661] Trial 3024 pruned.
[I 2021-05-10 07:52:48,228] Trial 3025 pruned.
[I 2021-05-10 07:52:48,790] Trial 3026 pruned.
[I 2021-05-10 07:52:49,357] Trial 3027 pruned.
[I 2021-05-10 07:52:49,920] Trial 3028 pruned.
[I 2021-05-10 07:52:50,483] Trial 3029 pruned.
[I 2021-05-10 07:53:41,822] Trial 3030 finished with value: 365.43450927734375 and parameters: {'lr': 0.003626379140197271, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:53:42,387] Trial 3031 pruned.
[I 2021-05-10 07:53:42,951] Trial 3032 pruned.
[I 2021-05-10 07:53:43,520] Trial 3033 pruned.
[I 2021-05-10 07:53:44,085] Trial 3034 pruned.
[I 2021-05-10 07:53:44,644] Trial 3035 pruned.
[I 2021-05-10 07:53:44,765] Trial 3036 pruned.
[I 2021-05-10 07:53:45,327] Trial 3037 pruned.
[I 2021-05-10 07:53:45,891] Trial 3038 pruned.
[I 2021-05-10 07:53:46,204] Trial 3039 pruned.
[I 2021-05-10 07:53:46,766] Trial 3040 pruned.
[I 2021-05-10 07:53:47,323] Trial 3041 pruned.
[I 2021-05-10 07:53:47,884] Trial 3042 pruned.
[I 2021-05-10 07:53:48,447] Trial 3043 pruned.
[I 2021-05-10 07:53:49,005] Trial 3044 pruned.
[I 2021-05-10 07:53:49,558] Trial 3045 pruned.
[I 2021-05-10 07:53:50,125] Trial 3046 pruned.
[I 2021-05-10 07:53:50,692] Trial 3047 pruned.
[I 2021-05-10 07:53:51,247] Trial 3048 pruned.
[I 2021-05-10 07:53:51,813] Trial 3049 pruned.
[I 2021-05-10 07:53:52,379] Trial 3050 pruned.
[I 2021-05-10 07:53:52,936] Trial 3051 pruned.
[I 2021-05-10 07:53:53,500] Trial 3052 pruned.
[I 2021-05-10 07:53:54,067] Trial 3053 pruned.
[I 2021-05-10 07:53:54,249] Trial 3054 pruned.
[I 2021-05-10 07:53:54,816] Trial 3055 pruned.
[I 2021-05-10 07:53:55,378] Trial 3056 pruned.
[I 2021-05-10 07:53:55,941] Trial 3057 pruned.
[I 2021-05-10 07:53:56,492] Trial 3058 pruned.
[I 2021-05-10 07:53:57,058] Trial 3059 pruned.
[I 2021-05-10 07:53:57,620] Trial 3060 pruned.
[I 2021-05-10 07:53:58,164] Trial 3061 pruned.
[I 2021-05-10 07:53:58,734] Trial 3062 pruned.
[I 2021-05-10 07:53:59,291] Trial 3063 pruned.
[I 2021-05-10 07:53:59,847] Trial 3064 pruned.
[I 2021-05-10 07:54:00,413] Trial 3065 pruned.
[I 2021-05-10 07:54:00,535] Trial 3066 pruned.
[I 2021-05-10 07:54:01,099] Trial 3067 pruned.
[I 2021-05-10 07:54:01,406] Trial 3068 pruned.
[I 2021-05-10 07:54:01,959] Trial 3069 pruned.
[I 2021-05-10 07:54:02,525] Trial 3070 pruned.
[I 2021-05-10 07:54:53,003] Trial 3071 finished with value: 395.17138671875 and parameters: {'lr': 0.0038038202251705093, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:54:53,572] Trial 3072 pruned.
[I 2021-05-10 07:54:54,136] Trial 3073 pruned.
[I 2021-05-10 07:54:54,701] Trial 3074 pruned.
[I 2021-05-10 07:54:55,270] Trial 3075 pruned.
[I 2021-05-10 07:54:55,839] Trial 3076 pruned.
[I 2021-05-10 07:54:56,404] Trial 3077 pruned.
[I 2021-05-10 07:54:56,963] Trial 3078 pruned.
[I 2021-05-10 07:54:57,540] Trial 3079 pruned.
[I 2021-05-10 07:54:58,115] Trial 3080 pruned.
[I 2021-05-10 07:54:58,722] Trial 3081 pruned.
[I 2021-05-10 07:54:59,288] Trial 3082 pruned.
[I 2021-05-10 07:54:59,473] Trial 3083 pruned.
[I 2021-05-10 07:55:00,039] Trial 3084 pruned.
[I 2021-05-10 07:55:00,597] Trial 3085 pruned.
[I 2021-05-10 07:55:01,163] Trial 3086 pruned.
[I 2021-05-10 07:55:01,728] Trial 3087 pruned.
[I 2021-05-10 07:55:02,277] Trial 3088 pruned.
[I 2021-05-10 07:55:02,841] Trial 3089 pruned.
[I 2021-05-10 07:55:03,405] Trial 3090 pruned.
[I 2021-05-10 07:55:03,954] Trial 3091 pruned.
[I 2021-05-10 07:55:04,518] Trial 3092 pruned.
[I 2021-05-10 07:55:55,803] Trial 3093 finished with value: 364.67230224609375 and parameters: {'lr': 0.0043201689635501824, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:55:56,369] Trial 3094 pruned.
[I 2021-05-10 07:55:56,495] Trial 3095 pruned.
[I 2021-05-10 07:55:57,049] Trial 3096 pruned.
[I 2021-05-10 07:55:57,361] Trial 3097 pruned.
[I 2021-05-10 07:55:57,923] Trial 3098 pruned.
[I 2021-05-10 07:55:58,484] Trial 3099 pruned.
[I 2021-05-10 07:56:00,075] Trial 3100 pruned.
[I 2021-05-10 07:56:00,639] Trial 3101 pruned.
[I 2021-05-10 07:56:01,204] Trial 3102 pruned.
[I 2021-05-10 07:56:01,771] Trial 3103 pruned.
[I 2021-05-10 07:56:02,339] Trial 3104 pruned.
[I 2021-05-10 07:56:02,904] Trial 3105 pruned.
[I 2021-05-10 07:56:03,469] Trial 3106 pruned.
[I 2021-05-10 07:56:04,034] Trial 3107 pruned.
[I 2021-05-10 07:56:04,597] Trial 3108 pruned.
[I 2021-05-10 07:56:05,161] Trial 3109 pruned.
[I 2021-05-10 07:56:05,729] Trial 3110 pruned.
[I 2021-05-10 07:56:06,297] Trial 3111 pruned.
[I 2021-05-10 07:56:06,864] Trial 3112 pruned.
[I 2021-05-10 07:56:07,425] Trial 3113 pruned.
[I 2021-05-10 07:56:07,613] Trial 3114 pruned.
[I 2021-05-10 07:56:08,180] Trial 3115 pruned.
[I 2021-05-10 07:56:08,743] Trial 3116 pruned.
[I 2021-05-10 07:56:09,301] Trial 3117 pruned.
[I 2021-05-10 07:56:09,861] Trial 3118 pruned.
[I 2021-05-10 07:56:10,430] Trial 3119 pruned.
[I 2021-05-10 07:56:10,997] Trial 3120 pruned.
[I 2021-05-10 07:56:11,558] Trial 3121 pruned.
[I 2021-05-10 07:56:12,109] Trial 3122 pruned.
[I 2021-05-10 07:56:12,670] Trial 3123 pruned.
[I 2021-05-10 07:56:12,793] Trial 3124 pruned.
[I 2021-05-10 07:56:13,358] Trial 3125 pruned.
[I 2021-05-10 07:56:13,924] Trial 3126 pruned.
[I 2021-05-10 07:56:14,489] Trial 3127 pruned.
[I 2021-05-10 07:56:14,805] Trial 3128 pruned.
[I 2021-05-10 07:56:15,372] Trial 3129 pruned.
[I 2021-05-10 07:56:15,932] Trial 3130 pruned.
[I 2021-05-10 07:56:16,497] Trial 3131 pruned.
[I 2021-05-10 07:56:17,060] Trial 3132 pruned.
[I 2021-05-10 07:56:17,621] Trial 3133 pruned.
[I 2021-05-10 07:56:18,185] Trial 3134 pruned.
[I 2021-05-10 07:56:18,748] Trial 3135 pruned.
[I 2021-05-10 07:56:19,315] Trial 3136 pruned.
[I 2021-05-10 07:56:19,882] Trial 3137 pruned.
[I 2021-05-10 07:56:21,468] Trial 3138 pruned.
[I 2021-05-10 07:56:22,034] Trial 3139 pruned.
[I 2021-05-10 07:56:22,596] Trial 3140 pruned.
[I 2021-05-10 07:56:23,161] Trial 3141 pruned.
[I 2021-05-10 07:56:23,743] Trial 3142 pruned.
[I 2021-05-10 07:56:23,929] Trial 3143 pruned.
[I 2021-05-10 07:56:24,495] Trial 3144 pruned.
[I 2021-05-10 07:56:25,055] Trial 3145 pruned.
[I 2021-05-10 07:56:25,618] Trial 3146 pruned.
[I 2021-05-10 07:56:26,184] Trial 3147 pruned.
[I 2021-05-10 07:56:26,752] Trial 3148 pruned.
[I 2021-05-10 07:56:27,317] Trial 3149 pruned.
[I 2021-05-10 07:56:27,883] Trial 3150 pruned.
[I 2021-05-10 07:56:28,453] Trial 3151 pruned.
[I 2021-05-10 07:56:29,007] Trial 3152 pruned.
[I 2021-05-10 07:56:29,130] Trial 3153 pruned.
[I 2021-05-10 07:56:29,698] Trial 3154 pruned.
[I 2021-05-10 07:56:30,263] Trial 3155 pruned.
[I 2021-05-10 07:56:30,577] Trial 3156 pruned.
[I 2021-05-10 07:56:31,144] Trial 3157 pruned.
[I 2021-05-10 07:56:31,712] Trial 3158 pruned.
[I 2021-05-10 07:56:32,288] Trial 3159 pruned.
[I 2021-05-10 07:56:32,851] Trial 3160 pruned.
[I 2021-05-10 07:56:33,416] Trial 3161 pruned.
[I 2021-05-10 07:56:33,993] Trial 3162 pruned.
[I 2021-05-10 07:56:34,559] Trial 3163 pruned.
[I 2021-05-10 07:56:35,126] Trial 3164 pruned.
[I 2021-05-10 07:56:35,695] Trial 3165 pruned.
[I 2021-05-10 07:56:36,261] Trial 3166 pruned.
[I 2021-05-10 07:56:36,830] Trial 3167 pruned.
[I 2021-05-10 07:56:37,394] Trial 3168 pruned.
[I 2021-05-10 07:56:37,962] Trial 3169 pruned.
[I 2021-05-10 07:56:38,519] Trial 3170 pruned.
[I 2021-05-10 07:56:38,708] Trial 3171 pruned.
[I 2021-05-10 07:56:39,273] Trial 3172 pruned.
[I 2021-05-10 07:56:39,841] Trial 3173 pruned.
[I 2021-05-10 07:56:40,397] Trial 3174 pruned.
[I 2021-05-10 07:56:40,964] Trial 3175 pruned.
[I 2021-05-10 07:56:41,528] Trial 3176 pruned.
[I 2021-05-10 07:56:42,089] Trial 3177 pruned.
[I 2021-05-10 07:56:42,651] Trial 3178 pruned.
[I 2021-05-10 07:56:43,219] Trial 3179 pruned.
[I 2021-05-10 07:56:43,786] Trial 3180 pruned.
[I 2021-05-10 07:56:44,353] Trial 3181 pruned.
[I 2021-05-10 07:56:44,920] Trial 3182 pruned.
[I 2021-05-10 07:56:45,484] Trial 3183 pruned.
[I 2021-05-10 07:56:45,607] Trial 3184 pruned.
[I 2021-05-10 07:56:46,175] Trial 3185 pruned.
[I 2021-05-10 07:56:46,758] Trial 3186 pruned.
[I 2021-05-10 07:56:47,072] Trial 3187 pruned.
[I 2021-05-10 07:56:47,640] Trial 3188 pruned.
[I 2021-05-10 07:56:48,204] Trial 3189 pruned.
[I 2021-05-10 07:56:48,768] Trial 3190 pruned.
[I 2021-05-10 07:56:49,334] Trial 3191 pruned.
[I 2021-05-10 07:56:49,902] Trial 3192 pruned.
[I 2021-05-10 07:56:50,467] Trial 3193 pruned.
[I 2021-05-10 07:56:51,036] Trial 3194 pruned.
[I 2021-05-10 07:56:51,603] Trial 3195 pruned.
[I 2021-05-10 07:56:52,173] Trial 3196 pruned.
[I 2021-05-10 07:56:52,736] Trial 3197 pruned.
[I 2021-05-10 07:56:53,304] Trial 3198 pruned.
[I 2021-05-10 07:56:53,873] Trial 3199 pruned.
[I 2021-05-10 07:56:54,444] Trial 3200 pruned.
[I 2021-05-10 07:56:55,002] Trial 3201 pruned.
[I 2021-05-10 07:56:55,193] Trial 3202 pruned.
[I 2021-05-10 07:56:55,763] Trial 3203 pruned.
[I 2021-05-10 07:56:56,329] Trial 3204 pruned.
[I 2021-05-10 07:56:56,898] Trial 3205 pruned.
[I 2021-05-10 07:56:57,452] Trial 3206 pruned.
[I 2021-05-10 07:56:58,019] Trial 3207 pruned.
[I 2021-05-10 07:56:58,584] Trial 3208 pruned.
[I 2021-05-10 07:56:59,149] Trial 3209 pruned.
[I 2021-05-10 07:56:59,722] Trial 3210 pruned.
[I 2021-05-10 07:56:59,848] Trial 3211 pruned.
[I 2021-05-10 07:57:00,417] Trial 3212 pruned.
[I 2021-05-10 07:57:00,986] Trial 3213 pruned.
[I 2021-05-10 07:57:01,298] Trial 3214 pruned.
[I 2021-05-10 07:57:01,866] Trial 3215 pruned.
[I 2021-05-10 07:57:02,435] Trial 3216 pruned.
[I 2021-05-10 07:57:03,005] Trial 3217 pruned.
[I 2021-05-10 07:57:03,573] Trial 3218 pruned.
[I 2021-05-10 07:57:04,142] Trial 3219 pruned.
[I 2021-05-10 07:57:04,707] Trial 3220 pruned.
[I 2021-05-10 07:57:05,276] Trial 3221 pruned.
[I 2021-05-10 07:57:56,587] Trial 3222 finished with value: 372.9859619140625 and parameters: {'lr': 0.005638167404770954, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:57:57,159] Trial 3223 pruned.
[I 2021-05-10 07:57:57,725] Trial 3224 pruned.
[I 2021-05-10 07:57:58,284] Trial 3225 pruned.
[I 2021-05-10 07:57:58,850] Trial 3226 pruned.
[I 2021-05-10 07:57:59,412] Trial 3227 pruned.
[I 2021-05-10 07:57:59,981] Trial 3228 pruned.
[I 2021-05-10 07:58:00,550] Trial 3229 pruned.
[I 2021-05-10 07:58:00,738] Trial 3230 pruned.
[I 2021-05-10 07:58:01,309] Trial 3231 pruned.
[I 2021-05-10 07:58:01,866] Trial 3232 pruned.
[I 2021-05-10 07:58:02,433] Trial 3233 pruned.
[I 2021-05-10 07:58:04,027] Trial 3234 pruned.
[I 2021-05-10 07:58:04,593] Trial 3235 pruned.
[I 2021-05-10 07:58:05,162] Trial 3236 pruned.
[I 2021-05-10 07:58:05,730] Trial 3237 pruned.
[I 2021-05-10 07:58:06,302] Trial 3238 pruned.
[I 2021-05-10 07:58:06,873] Trial 3239 pruned.
[I 2021-05-10 07:58:07,447] Trial 3240 pruned.
[I 2021-05-10 07:58:07,572] Trial 3241 pruned.
[I 2021-05-10 07:58:08,141] Trial 3242 pruned.
[I 2021-05-10 07:58:08,707] Trial 3243 pruned.
[I 2021-05-10 07:58:09,275] Trial 3244 pruned.
[I 2021-05-10 07:58:09,591] Trial 3245 pruned.
[I 2021-05-10 07:58:10,160] Trial 3246 pruned.
[I 2021-05-10 07:58:10,728] Trial 3247 pruned.
[I 2021-05-10 07:58:11,294] Trial 3248 pruned.
[I 2021-05-10 07:58:11,864] Trial 3249 pruned.
[I 2021-05-10 07:58:12,431] Trial 3250 pruned.
[I 2021-05-10 07:58:13,001] Trial 3251 pruned.
[I 2021-05-10 07:58:13,572] Trial 3252 pruned.
[I 2021-05-10 07:58:14,126] Trial 3253 pruned.
[I 2021-05-10 07:58:14,692] Trial 3254 pruned.
[I 2021-05-10 07:58:15,255] Trial 3255 pruned.
[I 2021-05-10 07:58:15,822] Trial 3256 pruned.
[I 2021-05-10 07:58:16,389] Trial 3257 pruned.
[I 2021-05-10 07:58:16,959] Trial 3258 pruned.
[I 2021-05-10 07:58:17,147] Trial 3259 pruned.
[I 2021-05-10 07:58:17,715] Trial 3260 pruned.
[I 2021-05-10 07:58:18,281] Trial 3261 pruned.
[I 2021-05-10 07:58:18,850] Trial 3262 pruned.
[I 2021-05-10 07:58:19,418] Trial 3263 pruned.
[I 2021-05-10 07:58:21,006] Trial 3264 pruned.
[I 2021-05-10 07:58:21,573] Trial 3265 pruned.
[I 2021-05-10 07:58:22,141] Trial 3266 pruned.
[I 2021-05-10 07:58:22,714] Trial 3267 pruned.
[I 2021-05-10 07:58:23,287] Trial 3268 pruned.
[I 2021-05-10 07:58:23,854] Trial 3269 pruned.
[I 2021-05-10 07:58:23,980] Trial 3270 pruned.
[I 2021-05-10 07:58:24,548] Trial 3271 pruned.
[I 2021-05-10 07:58:25,113] Trial 3272 pruned.
[I 2021-05-10 07:58:25,678] Trial 3273 pruned.
[I 2021-05-10 07:58:26,246] Trial 3274 pruned.
[I 2021-05-10 07:58:26,562] Trial 3275 pruned.
[I 2021-05-10 07:58:27,137] Trial 3276 pruned.
[I 2021-05-10 07:58:27,707] Trial 3277 pruned.
[I 2021-05-10 07:58:28,277] Trial 3278 pruned.
[I 2021-05-10 07:58:28,831] Trial 3279 pruned.
[I 2021-05-10 07:58:29,401] Trial 3280 pruned.
[I 2021-05-10 07:58:29,969] Trial 3281 pruned.
[I 2021-05-10 07:58:30,537] Trial 3282 pruned.
[I 2021-05-10 07:58:31,097] Trial 3283 pruned.
[I 2021-05-10 07:58:31,671] Trial 3284 pruned.
[I 2021-05-10 07:58:32,244] Trial 3285 pruned.
[I 2021-05-10 07:58:32,806] Trial 3286 pruned.
[I 2021-05-10 07:58:33,372] Trial 3287 pruned.
[I 2021-05-10 07:58:33,561] Trial 3288 pruned.
[I 2021-05-10 07:58:34,130] Trial 3289 pruned.
[I 2021-05-10 07:58:34,702] Trial 3290 pruned.
[I 2021-05-10 07:58:35,269] Trial 3291 pruned.
[I 2021-05-10 07:58:35,837] Trial 3292 pruned.
[I 2021-05-10 07:58:36,410] Trial 3293 pruned.
[I 2021-05-10 07:58:36,980] Trial 3294 pruned.
[I 2021-05-10 07:58:37,550] Trial 3295 pruned.
[I 2021-05-10 07:58:38,114] Trial 3296 pruned.
[I 2021-05-10 07:58:38,685] Trial 3297 pruned.
[I 2021-05-10 07:58:39,254] Trial 3298 pruned.
[I 2021-05-10 07:58:39,825] Trial 3299 pruned.
[I 2021-05-10 07:58:40,394] Trial 3300 pruned.
[I 2021-05-10 07:58:40,519] Trial 3301 pruned.
[I 2021-05-10 07:58:41,089] Trial 3302 pruned.
[I 2021-05-10 07:58:41,659] Trial 3303 pruned.
[I 2021-05-10 07:58:42,226] Trial 3304 pruned.
[I 2021-05-10 07:58:42,797] Trial 3305 pruned.
[I 2021-05-10 07:58:43,113] Trial 3306 pruned.
[I 2021-05-10 07:58:43,686] Trial 3307 pruned.
[I 2021-05-10 07:58:44,247] Trial 3308 pruned.
[I 2021-05-10 07:58:44,808] Trial 3309 pruned.
[I 2021-05-10 07:58:45,378] Trial 3310 pruned.
[I 2021-05-10 07:58:45,939] Trial 3311 pruned.
[I 2021-05-10 07:58:46,513] Trial 3312 pruned.
[I 2021-05-10 07:58:47,076] Trial 3313 pruned.
[I 2021-05-10 07:58:47,640] Trial 3314 pruned.
[I 2021-05-10 07:58:48,211] Trial 3315 pruned.
[I 2021-05-10 07:58:48,779] Trial 3316 pruned.
[I 2021-05-10 07:58:48,968] Trial 3317 pruned.
[I 2021-05-10 07:58:49,532] Trial 3318 pruned.
[I 2021-05-10 07:58:50,101] Trial 3319 pruned.
[I 2021-05-10 07:58:50,674] Trial 3320 pruned.
[I 2021-05-10 07:58:51,242] Trial 3321 pruned.
[I 2021-05-10 07:58:51,799] Trial 3322 pruned.
[I 2021-05-10 07:58:52,368] Trial 3323 pruned.
[I 2021-05-10 07:58:52,949] Trial 3324 pruned.
[I 2021-05-10 07:58:53,522] Trial 3325 pruned.
[I 2021-05-10 07:58:54,083] Trial 3326 pruned.
[I 2021-05-10 07:58:54,652] Trial 3327 pruned.
[I 2021-05-10 07:58:55,221] Trial 3328 pruned.
[I 2021-05-10 07:58:55,349] Trial 3329 pruned.
[I 2021-05-10 07:58:55,916] Trial 3330 pruned.
[I 2021-05-10 07:58:56,483] Trial 3331 pruned.
[I 2021-05-10 07:58:56,800] Trial 3332 pruned.
[I 2021-05-10 07:58:57,361] Trial 3333 pruned.
[I 2021-05-10 07:59:47,984] Trial 3334 finished with value: 371.3515930175781 and parameters: {'lr': 0.004504857575748979, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 07:59:48,548] Trial 3335 pruned.
[I 2021-05-10 07:59:49,096] Trial 3336 pruned.
[I 2021-05-10 07:59:49,659] Trial 3337 pruned.
[I 2021-05-10 07:59:50,225] Trial 3338 pruned.
[I 2021-05-10 07:59:50,784] Trial 3339 pruned.
[I 2021-05-10 07:59:51,337] Trial 3340 pruned.
[I 2021-05-10 07:59:51,900] Trial 3341 pruned.
[I 2021-05-10 08:00:42,519] Trial 3342 finished with value: 367.54864501953125 and parameters: {'lr': 0.005187131973195774, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 08:00:43,078] Trial 3343 pruned.
[I 2021-05-10 08:00:44,633] Trial 3344 pruned.
[I 2021-05-10 08:00:45,192] Trial 3345 pruned.
[I 2021-05-10 08:00:45,747] Trial 3346 pruned.
[I 2021-05-10 08:00:45,935] Trial 3347 pruned.
[I 2021-05-10 08:01:36,576] Trial 3348 finished with value: 488.7875061035156 and parameters: {'lr': 0.006527368347384893, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 08:01:37,136] Trial 3349 pruned.
[I 2021-05-10 08:01:37,703] Trial 3350 pruned.
[I 2021-05-10 08:01:38,266] Trial 3351 pruned.
[I 2021-05-10 08:01:38,822] Trial 3352 pruned.
[I 2021-05-10 08:01:39,387] Trial 3353 pruned.
[I 2021-05-10 08:01:39,952] Trial 3354 pruned.
[I 2021-05-10 08:01:40,515] Trial 3355 pruned.
[I 2021-05-10 08:02:31,047] Trial 3356 finished with value: 375.7452392578125 and parameters: {'lr': 0.004593719489295627, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 08:02:31,608] Trial 3357 pruned.
[I 2021-05-10 08:02:32,172] Trial 3358 pruned.
[I 2021-05-10 08:02:32,298] Trial 3359 pruned.
[I 2021-05-10 08:02:32,858] Trial 3360 pruned.
[I 2021-05-10 08:02:33,164] Trial 3361 pruned.
[I 2021-05-10 08:02:33,730] Trial 3362 pruned.
[I 2021-05-10 08:02:34,288] Trial 3363 pruned.
[I 2021-05-10 08:02:34,837] Trial 3364 pruned.
[I 2021-05-10 08:02:35,394] Trial 3365 pruned.
[I 2021-05-10 08:02:35,959] Trial 3366 pruned.
[I 2021-05-10 08:02:36,516] Trial 3367 pruned.
[I 2021-05-10 08:02:37,075] Trial 3368 pruned.
[I 2021-05-10 08:02:37,621] Trial 3369 pruned.
[I 2021-05-10 08:02:38,187] Trial 3370 pruned.
[I 2021-05-10 08:02:38,744] Trial 3371 pruned.
[I 2021-05-10 08:02:39,305] Trial 3372 pruned.
[I 2021-05-10 08:02:39,860] Trial 3373 pruned.
[I 2021-05-10 08:02:40,418] Trial 3374 pruned.
[I 2021-05-10 08:02:40,976] Trial 3375 pruned.
[I 2021-05-10 08:02:41,537] Trial 3376 pruned.
[I 2021-05-10 08:02:41,724] Trial 3377 pruned.
[I 2021-05-10 08:02:42,288] Trial 3378 pruned.
[I 2021-05-10 08:02:42,852] Trial 3379 pruned.
[I 2021-05-10 08:02:43,413] Trial 3380 pruned.
[I 2021-05-10 08:02:43,977] Trial 3381 pruned.
[I 2021-05-10 08:02:44,537] Trial 3382 pruned.
[I 2021-05-10 08:02:45,096] Trial 3383 pruned.
[I 2021-05-10 08:02:45,655] Trial 3384 pruned.
[I 2021-05-10 08:02:46,217] Trial 3385 pruned.
[I 2021-05-10 08:02:46,790] Trial 3386 pruned.
[I 2021-05-10 08:02:47,356] Trial 3387 pruned.
[I 2021-05-10 08:02:47,911] Trial 3388 pruned.
[I 2021-05-10 08:02:48,225] Trial 3389 pruned.
[I 2021-05-10 08:02:48,351] Trial 3390 pruned.
[I 2021-05-10 08:02:48,914] Trial 3391 pruned.
[I 2021-05-10 08:02:49,477] Trial 3392 pruned.
[I 2021-05-10 08:02:50,034] Trial 3393 pruned.
[I 2021-05-10 08:02:50,595] Trial 3394 pruned.
[I 2021-05-10 08:02:51,148] Trial 3395 pruned.
[I 2021-05-10 08:02:51,719] Trial 3396 pruned.
[I 2021-05-10 08:02:52,281] Trial 3397 pruned.
[I 2021-05-10 08:02:52,852] Trial 3398 pruned.
[I 2021-05-10 08:02:53,420] Trial 3399 pruned.
[I 2021-05-10 08:02:54,008] Trial 3400 pruned.
[I 2021-05-10 08:02:54,584] Trial 3401 pruned.
[I 2021-05-10 08:02:55,151] Trial 3402 pruned.
[I 2021-05-10 08:02:55,725] Trial 3403 pruned.
[I 2021-05-10 08:02:56,288] Trial 3404 pruned.
[I 2021-05-10 08:02:56,477] Trial 3405 pruned.
[I 2021-05-10 08:02:57,040] Trial 3406 pruned.
[I 2021-05-10 08:02:57,625] Trial 3407 pruned.
[I 2021-05-10 08:02:58,193] Trial 3408 pruned.
[I 2021-05-10 08:02:58,751] Trial 3409 pruned.
[I 2021-05-10 08:02:59,315] Trial 3410 pruned.
[I 2021-05-10 08:02:59,879] Trial 3411 pruned.
[I 2021-05-10 08:03:00,440] Trial 3412 pruned.
[I 2021-05-10 08:03:01,001] Trial 3413 pruned.
[I 2021-05-10 08:03:01,559] Trial 3414 pruned.
[I 2021-05-10 08:03:02,123] Trial 3415 pruned.
[I 2021-05-10 08:03:02,678] Trial 3416 pruned.
[I 2021-05-10 08:03:02,806] Trial 3417 pruned.
[I 2021-05-10 08:03:03,361] Trial 3418 pruned.
[I 2021-05-10 08:03:03,673] Trial 3419 pruned.
[I 2021-05-10 08:03:04,239] Trial 3420 pruned.
[I 2021-05-10 08:03:05,812] Trial 3421 pruned.
[I 2021-05-10 08:03:07,395] Trial 3422 pruned.
[I 2021-05-10 08:03:08,939] Trial 3423 pruned.
[I 2021-05-10 08:03:09,504] Trial 3424 pruned.
[I 2021-05-10 08:03:10,067] Trial 3425 pruned.
[I 2021-05-10 08:03:10,626] Trial 3426 pruned.
[I 2021-05-10 08:03:11,192] Trial 3427 pruned.
[I 2021-05-10 08:03:11,756] Trial 3428 pruned.
[I 2021-05-10 08:03:12,326] Trial 3429 pruned.
[I 2021-05-10 08:03:12,886] Trial 3430 pruned.
[I 2021-05-10 08:03:13,452] Trial 3431 pruned.
[I 2021-05-10 08:03:14,016] Trial 3432 pruned.
[I 2021-05-10 08:03:14,574] Trial 3433 pruned.
[I 2021-05-10 08:03:15,138] Trial 3434 pruned.
[I 2021-05-10 08:03:15,704] Trial 3435 pruned.
[I 2021-05-10 08:03:15,895] Trial 3436 pruned.
[I 2021-05-10 08:03:16,455] Trial 3437 pruned.
[I 2021-05-10 08:03:17,018] Trial 3438 pruned.
[I 2021-05-10 08:03:17,587] Trial 3439 pruned.
[I 2021-05-10 08:03:18,150] Trial 3440 pruned.
[I 2021-05-10 08:03:18,711] Trial 3441 pruned.
[I 2021-05-10 08:03:19,271] Trial 3442 pruned.
[I 2021-05-10 08:03:19,829] Trial 3443 pruned.
[I 2021-05-10 08:03:20,394] Trial 3444 pruned.
[I 2021-05-10 08:03:20,947] Trial 3445 pruned.
[I 2021-05-10 08:03:21,074] Trial 3446 pruned.
[I 2021-05-10 08:03:21,634] Trial 3447 pruned.
[I 2021-05-10 08:03:22,244] Trial 3448 pruned.
[I 2021-05-10 08:03:22,794] Trial 3449 pruned.
[I 2021-05-10 08:03:23,108] Trial 3450 pruned.
[I 2021-05-10 08:03:23,674] Trial 3451 pruned.
[I 2021-05-10 08:03:24,234] Trial 3452 pruned.
[I 2021-05-10 08:03:24,797] Trial 3453 pruned.
[I 2021-05-10 08:03:25,362] Trial 3454 pruned.
[I 2021-05-10 08:03:25,923] Trial 3455 pruned.
[I 2021-05-10 08:03:26,486] Trial 3456 pruned.
[I 2021-05-10 08:03:27,051] Trial 3457 pruned.
[I 2021-05-10 08:03:27,610] Trial 3458 pruned.
[I 2021-05-10 08:04:18,224] Trial 3459 finished with value: 380.4921569824219 and parameters: {'lr': 0.0035429151247849525, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 08:04:18,797] Trial 3460 pruned.
[I 2021-05-10 08:04:19,368] Trial 3461 pruned.
[I 2021-05-10 08:04:19,942] Trial 3462 pruned.
[I 2021-05-10 08:04:20,503] Trial 3463 pruned.
[I 2021-05-10 08:04:21,064] Trial 3464 pruned.
[I 2021-05-10 08:04:21,624] Trial 3465 pruned.
[I 2021-05-10 08:04:21,814] Trial 3466 pruned.
[I 2021-05-10 08:04:22,379] Trial 3467 pruned.
[I 2021-05-10 08:04:22,943] Trial 3468 pruned.
[I 2021-05-10 08:04:23,505] Trial 3469 pruned.
[I 2021-05-10 08:04:24,072] Trial 3470 pruned.
[I 2021-05-10 08:04:24,631] Trial 3471 pruned.
[I 2021-05-10 08:04:25,176] Trial 3472 pruned.
[I 2021-05-10 08:04:25,742] Trial 3473 pruned.
[I 2021-05-10 08:04:26,317] Trial 3474 pruned.
[I 2021-05-10 08:04:26,445] Trial 3475 pruned.
[I 2021-05-10 08:04:27,006] Trial 3476 pruned.
[I 2021-05-10 08:04:27,319] Trial 3477 pruned.
[I 2021-05-10 08:04:27,876] Trial 3478 pruned.
[I 2021-05-10 08:04:28,439] Trial 3479 pruned.
[I 2021-05-10 08:04:28,999] Trial 3480 pruned.
[I 2021-05-10 08:04:29,559] Trial 3481 pruned.
[I 2021-05-10 08:04:30,138] Trial 3482 pruned.
[I 2021-05-10 08:04:30,704] Trial 3483 pruned.
[I 2021-05-10 08:04:31,264] Trial 3484 pruned.
[I 2021-05-10 08:04:31,831] Trial 3485 pruned.
[I 2021-05-10 08:04:32,400] Trial 3486 pruned.
[I 2021-05-10 08:04:32,964] Trial 3487 pruned.
[I 2021-05-10 08:04:33,524] Trial 3488 pruned.
[I 2021-05-10 08:04:34,093] Trial 3489 pruned.
[I 2021-05-10 08:04:34,658] Trial 3490 pruned.
[I 2021-05-10 08:04:35,226] Trial 3491 pruned.
[I 2021-05-10 08:04:35,785] Trial 3492 pruned.
[I 2021-05-10 08:04:36,347] Trial 3493 pruned.
[I 2021-05-10 08:04:36,538] Trial 3494 pruned.
[I 2021-05-10 08:04:37,103] Trial 3495 pruned.
[I 2021-05-10 08:04:37,661] Trial 3496 pruned.
[I 2021-05-10 08:04:38,233] Trial 3497 pruned.
[I 2021-05-10 08:04:38,793] Trial 3498 pruned.
[I 2021-05-10 08:04:39,348] Trial 3499 pruned.
[I 2021-05-10 08:04:39,908] Trial 3500 pruned.
[I 2021-05-10 08:04:40,476] Trial 3501 pruned.
[I 2021-05-10 08:04:41,038] Trial 3502 pruned.
[I 2021-05-10 08:04:41,600] Trial 3503 pruned.
[I 2021-05-10 08:04:41,729] Trial 3504 pruned.
[I 2021-05-10 08:04:42,289] Trial 3505 pruned.
[I 2021-05-10 08:04:42,849] Trial 3506 pruned.
[I 2021-05-10 08:04:43,416] Trial 3507 pruned.
[I 2021-05-10 08:04:43,731] Trial 3508 pruned.
[I 2021-05-10 08:04:44,298] Trial 3509 pruned.
[I 2021-05-10 08:04:44,856] Trial 3510 pruned.
[I 2021-05-10 08:04:45,418] Trial 3511 pruned.
[I 2021-05-10 08:04:45,978] Trial 3512 pruned.
[I 2021-05-10 08:04:46,551] Trial 3513 pruned.
[I 2021-05-10 08:04:47,120] Trial 3514 pruned.
[I 2021-05-10 08:04:48,701] Trial 3515 pruned.
[I 2021-05-10 08:04:49,266] Trial 3516 pruned.
[I 2021-05-10 08:04:49,841] Trial 3517 pruned.
[I 2021-05-10 08:04:50,414] Trial 3518 pruned.
[I 2021-05-10 08:04:50,977] Trial 3519 pruned.
[I 2021-05-10 08:04:51,549] Trial 3520 pruned.
[I 2021-05-10 08:04:52,123] Trial 3521 pruned.
[I 2021-05-10 08:04:52,315] Trial 3522 pruned.
[I 2021-05-10 08:04:52,892] Trial 3523 pruned.
[I 2021-05-10 08:04:53,460] Trial 3524 pruned.
[I 2021-05-10 08:04:54,026] Trial 3525 pruned.
[I 2021-05-10 08:04:54,590] Trial 3526 pruned.
[I 2021-05-10 08:04:55,166] Trial 3527 pruned.
[I 2021-05-10 08:04:55,732] Trial 3528 pruned.
[I 2021-05-10 08:04:56,306] Trial 3529 pruned.
[I 2021-05-10 08:04:56,874] Trial 3530 pruned.
[I 2021-05-10 08:04:57,431] Trial 3531 pruned.
[I 2021-05-10 08:04:58,006] Trial 3532 pruned.
[I 2021-05-10 08:04:58,581] Trial 3533 pruned.
[I 2021-05-10 08:04:58,711] Trial 3534 pruned.
[I 2021-05-10 08:04:59,287] Trial 3535 pruned.
[I 2021-05-10 08:04:59,864] Trial 3536 pruned.
[I 2021-05-10 08:05:00,182] Trial 3537 pruned.
[I 2021-05-10 08:05:00,752] Trial 3538 pruned.
[I 2021-05-10 08:05:01,327] Trial 3539 pruned.
[I 2021-05-10 08:05:01,888] Trial 3540 pruned.
[I 2021-05-10 08:05:02,458] Trial 3541 pruned.
[I 2021-05-10 08:05:03,031] Trial 3542 pruned.
[I 2021-05-10 08:05:03,592] Trial 3543 pruned.
[I 2021-05-10 08:05:04,166] Trial 3544 pruned.
[I 2021-05-10 08:05:04,738] Trial 3545 pruned.
[I 2021-05-10 08:05:05,302] Trial 3546 pruned.
[I 2021-05-10 08:05:05,870] Trial 3547 pruned.
[I 2021-05-10 08:05:06,443] Trial 3548 pruned.
[I 2021-05-10 08:05:07,006] Trial 3549 pruned.
[I 2021-05-10 08:05:07,576] Trial 3550 pruned.
[I 2021-05-10 08:05:08,155] Trial 3551 pruned.
[I 2021-05-10 08:05:08,710] Trial 3552 pruned.
[I 2021-05-10 08:05:08,903] Trial 3553 pruned.
[I 2021-05-10 08:05:09,479] Trial 3554 pruned.
[I 2021-05-10 08:05:10,046] Trial 3555 pruned.
[I 2021-05-10 08:05:10,624] Trial 3556 pruned.
[I 2021-05-10 08:05:11,201] Trial 3557 pruned.
[I 2021-05-10 08:05:11,756] Trial 3558 pruned.
[I 2021-05-10 08:05:12,328] Trial 3559 pruned.
[I 2021-05-10 08:05:12,899] Trial 3560 pruned.
[I 2021-05-10 08:05:13,469] Trial 3561 pruned.
[I 2021-05-10 08:05:14,040] Trial 3562 pruned.
[I 2021-05-10 08:05:14,610] Trial 3563 pruned.
[I 2021-05-10 08:05:14,739] Trial 3564 pruned.
[I 2021-05-10 08:05:15,061] Trial 3565 pruned.
[I 2021-05-10 08:05:15,632] Trial 3566 pruned.
[I 2021-05-10 08:05:16,200] Trial 3567 pruned.
[I 2021-05-10 08:05:16,776] Trial 3568 pruned.
[I 2021-05-10 08:05:17,349] Trial 3569 pruned.
[I 2021-05-10 08:05:17,918] Trial 3570 pruned.
[I 2021-05-10 08:05:18,494] Trial 3571 pruned.
[I 2021-05-10 08:05:19,069] Trial 3572 pruned.
[I 2021-05-10 08:05:19,632] Trial 3573 pruned.
[I 2021-05-10 08:05:20,204] Trial 3574 pruned.
[I 2021-05-10 08:05:20,775] Trial 3575 pruned.
[I 2021-05-10 08:05:21,340] Trial 3576 pruned.
[I 2021-05-10 08:05:21,914] Trial 3577 pruned.
[I 2021-05-10 08:05:22,475] Trial 3578 pruned.
[I 2021-05-10 08:05:23,035] Trial 3579 pruned.
[I 2021-05-10 08:05:23,230] Trial 3580 pruned.
[I 2021-05-10 08:05:23,802] Trial 3581 pruned.
[I 2021-05-10 08:05:24,371] Trial 3582 pruned.
[I 2021-05-10 08:05:24,943] Trial 3583 pruned.
[I 2021-05-10 08:05:25,512] Trial 3584 pruned.
[I 2021-05-10 08:05:26,079] Trial 3585 pruned.
[I 2021-05-10 08:05:26,638] Trial 3586 pruned.
[I 2021-05-10 08:05:27,215] Trial 3587 pruned.
[I 2021-05-10 08:05:27,779] Trial 3588 pruned.
[I 2021-05-10 08:05:28,361] Trial 3589 pruned.
[I 2021-05-10 08:05:28,932] Trial 3590 pruned.
[I 2021-05-10 08:05:29,498] Trial 3591 pruned.
[I 2021-05-10 08:05:29,627] Trial 3592 pruned.
[I 2021-05-10 08:05:30,202] Trial 3593 pruned.
[I 2021-05-10 08:05:30,771] Trial 3594 pruned.
[I 2021-05-10 08:05:31,346] Trial 3595 pruned.
[I 2021-05-10 08:05:31,668] Trial 3596 pruned.
[I 2021-05-10 08:05:32,238] Trial 3597 pruned.
[I 2021-05-10 08:05:32,813] Trial 3598 pruned.
[I 2021-05-10 08:05:33,385] Trial 3599 pruned.
[I 2021-05-10 08:05:33,950] Trial 3600 pruned.
[I 2021-05-10 08:05:34,520] Trial 3601 pruned.
[I 2021-05-10 08:05:35,092] Trial 3602 pruned.
[I 2021-05-10 08:05:35,662] Trial 3603 pruned.
[I 2021-05-10 08:05:36,239] Trial 3604 pruned.
[I 2021-05-10 08:05:36,805] Trial 3605 pruned.
[I 2021-05-10 08:05:37,371] Trial 3606 pruned.
[I 2021-05-10 08:05:37,938] Trial 3607 pruned.
[I 2021-05-10 08:05:38,512] Trial 3608 pruned.
[I 2021-05-10 08:05:39,072] Trial 3609 pruned.
[I 2021-05-10 08:05:39,643] Trial 3610 pruned.
[I 2021-05-10 08:06:29,360] Trial 3611 pruned.
[I 2021-05-10 08:06:29,550] Trial 3612 pruned.
[I 2021-05-10 08:06:30,125] Trial 3613 pruned.
[I 2021-05-10 08:06:30,707] Trial 3614 pruned.
[I 2021-05-10 08:06:31,271] Trial 3615 pruned.
[I 2021-05-10 08:06:31,844] Trial 3616 pruned.
[I 2021-05-10 08:06:32,421] Trial 3617 pruned.
[I 2021-05-10 08:06:32,987] Trial 3618 pruned.
[I 2021-05-10 08:06:33,559] Trial 3619 pruned.
[I 2021-05-10 08:06:34,132] Trial 3620 pruned.
[I 2021-05-10 08:06:34,273] Trial 3621 pruned.
[I 2021-05-10 08:06:34,845] Trial 3622 pruned.
[I 2021-05-10 08:06:35,169] Trial 3623 pruned.
[I 2021-05-10 08:06:35,734] Trial 3624 pruned.
[I 2021-05-10 08:06:36,308] Trial 3625 pruned.
[I 2021-05-10 08:06:36,884] Trial 3626 pruned.
[I 2021-05-10 08:06:37,445] Trial 3627 pruned.
[I 2021-05-10 08:06:38,018] Trial 3628 pruned.
[I 2021-05-10 08:06:38,589] Trial 3629 pruned.
[I 2021-05-10 08:06:39,155] Trial 3630 pruned.
[I 2021-05-10 08:06:39,729] Trial 3631 pruned.
[I 2021-05-10 08:06:40,302] Trial 3632 pruned.
[I 2021-05-10 08:06:40,865] Trial 3633 pruned.
[I 2021-05-10 08:06:41,427] Trial 3634 pruned.
[I 2021-05-10 08:06:41,990] Trial 3635 pruned.
[I 2021-05-10 08:06:42,554] Trial 3636 pruned.
[I 2021-05-10 08:06:43,127] Trial 3637 pruned.
[I 2021-05-10 08:06:43,323] Trial 3638 pruned.
[I 2021-05-10 08:06:43,892] Trial 3639 pruned.
[I 2021-05-10 08:06:44,454] Trial 3640 pruned.
[I 2021-05-10 08:06:45,027] Trial 3641 pruned.
[I 2021-05-10 08:06:45,589] Trial 3642 pruned.
[I 2021-05-10 08:06:46,164] Trial 3643 pruned.
[I 2021-05-10 08:06:46,765] Trial 3644 pruned.
[I 2021-05-10 08:06:47,336] Trial 3645 pruned.
[I 2021-05-10 08:06:47,912] Trial 3646 pruned.
[I 2021-05-10 08:06:48,483] Trial 3647 pruned.
[I 2021-05-10 08:06:49,052] Trial 3648 pruned.
[I 2021-05-10 08:06:49,621] Trial 3649 pruned.
[I 2021-05-10 08:06:49,755] Trial 3650 pruned.
[I 2021-05-10 08:06:50,319] Trial 3651 pruned.
[I 2021-05-10 08:06:50,642] Trial 3652 pruned.
[I 2021-05-10 08:06:51,214] Trial 3653 pruned.
[I 2021-05-10 08:06:51,779] Trial 3654 pruned.
[I 2021-05-10 08:06:52,356] Trial 3655 pruned.
[I 2021-05-10 08:07:43,752] Trial 3656 finished with value: 390.1944885253906 and parameters: {'lr': 0.003461314680715561, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 08:07:44,317] Trial 3657 pruned.
[I 2021-05-10 08:07:44,889] Trial 3658 pruned.
[I 2021-05-10 08:07:45,467] Trial 3659 pruned.
[I 2021-05-10 08:07:46,038] Trial 3660 pruned.
[I 2021-05-10 08:07:46,595] Trial 3661 pruned.
[I 2021-05-10 08:07:47,167] Trial 3662 pruned.
[I 2021-05-10 08:07:47,732] Trial 3663 pruned.
[I 2021-05-10 08:07:49,334] Trial 3664 pruned.
[I 2021-05-10 08:07:49,912] Trial 3665 pruned.
[I 2021-05-10 08:07:50,471] Trial 3666 pruned.
[I 2021-05-10 08:07:51,044] Trial 3667 pruned.
[I 2021-05-10 08:07:52,640] Trial 3668 pruned.
[I 2021-05-10 08:07:52,834] Trial 3669 pruned.
[I 2021-05-10 08:07:53,408] Trial 3670 pruned.
[I 2021-05-10 08:07:53,987] Trial 3671 pruned.
[I 2021-05-10 08:07:54,552] Trial 3672 pruned.
[I 2021-05-10 08:07:55,129] Trial 3673 pruned.
[I 2021-05-10 08:07:56,732] Trial 3674 pruned.
[I 2021-05-10 08:07:57,297] Trial 3675 pruned.
[I 2021-05-10 08:07:57,872] Trial 3676 pruned.
[I 2021-05-10 08:07:58,448] Trial 3677 pruned.
[I 2021-05-10 08:07:59,016] Trial 3678 pruned.
[I 2021-05-10 08:07:59,591] Trial 3679 pruned.
[I 2021-05-10 08:08:00,167] Trial 3680 pruned.
[I 2021-05-10 08:08:00,484] Trial 3681 pruned.
[I 2021-05-10 08:08:00,620] Trial 3682 pruned.
[I 2021-05-10 08:08:01,198] Trial 3683 pruned.
[I 2021-05-10 08:08:01,768] Trial 3684 pruned.
[I 2021-05-10 08:08:02,345] Trial 3685 pruned.
[I 2021-05-10 08:08:02,919] Trial 3686 pruned.
[I 2021-05-10 08:08:03,490] Trial 3687 pruned.
[I 2021-05-10 08:08:04,059] Trial 3688 pruned.
[I 2021-05-10 08:08:04,621] Trial 3689 pruned.
[I 2021-05-10 08:08:05,191] Trial 3690 pruned.
[I 2021-05-10 08:08:05,766] Trial 3691 pruned.
[I 2021-05-10 08:08:06,345] Trial 3692 pruned.
[I 2021-05-10 08:08:06,917] Trial 3693 pruned.
[I 2021-05-10 08:08:07,486] Trial 3694 pruned.
[I 2021-05-10 08:08:08,061] Trial 3695 pruned.
[I 2021-05-10 08:08:08,625] Trial 3696 pruned.
[I 2021-05-10 08:08:08,824] Trial 3697 pruned.
[I 2021-05-10 08:08:09,401] Trial 3698 pruned.
[I 2021-05-10 08:08:09,969] Trial 3699 pruned.
[I 2021-05-10 08:08:10,544] Trial 3700 pruned.
[I 2021-05-10 08:08:11,123] Trial 3701 pruned.
[I 2021-05-10 08:08:11,694] Trial 3702 pruned.
[I 2021-05-10 08:08:12,269] Trial 3703 pruned.
[I 2021-05-10 08:08:12,845] Trial 3704 pruned.
[I 2021-05-10 08:08:14,415] Trial 3705 pruned.
[I 2021-05-10 08:08:14,988] Trial 3706 pruned.
[I 2021-05-10 08:08:15,559] Trial 3707 pruned.
[I 2021-05-10 08:08:15,692] Trial 3708 pruned.
[I 2021-05-10 08:08:16,268] Trial 3709 pruned.
[I 2021-05-10 08:08:16,846] Trial 3710 pruned.
[I 2021-05-10 08:08:17,164] Trial 3711 pruned.
[I 2021-05-10 08:08:17,739] Trial 3712 pruned.
[I 2021-05-10 08:08:18,314] Trial 3713 pruned.
[I 2021-05-10 08:08:18,873] Trial 3714 pruned.
[I 2021-05-10 08:08:19,449] Trial 3715 pruned.
[I 2021-05-10 08:08:20,024] Trial 3716 pruned.
[I 2021-05-10 08:08:20,578] Trial 3717 pruned.
[I 2021-05-10 08:08:21,155] Trial 3718 pruned.
[I 2021-05-10 08:08:21,734] Trial 3719 pruned.
[I 2021-05-10 08:08:22,287] Trial 3720 pruned.
[I 2021-05-10 08:08:22,863] Trial 3721 pruned.
[I 2021-05-10 08:08:23,442] Trial 3722 pruned.
[I 2021-05-10 08:08:24,006] Trial 3723 pruned.
[I 2021-05-10 08:08:24,581] Trial 3724 pruned.
[I 2021-05-10 08:08:25,157] Trial 3725 pruned.
[I 2021-05-10 08:08:25,741] Trial 3726 pruned.
[I 2021-05-10 08:08:26,316] Trial 3727 pruned.
[I 2021-05-10 08:08:26,895] Trial 3728 pruned.
[I 2021-05-10 08:08:27,464] Trial 3729 pruned.
[I 2021-05-10 08:08:27,662] Trial 3730 pruned.
[I 2021-05-10 08:08:28,238] Trial 3731 pruned.
[I 2021-05-10 08:08:28,804] Trial 3732 pruned.
[I 2021-05-10 08:08:29,382] Trial 3733 pruned.
[I 2021-05-10 08:08:29,954] Trial 3734 pruned.
[I 2021-05-10 08:08:30,525] Trial 3735 pruned.
[I 2021-05-10 08:08:31,104] Trial 3736 pruned.
[I 2021-05-10 08:08:31,683] Trial 3737 pruned.
[I 2021-05-10 08:08:32,002] Trial 3738 pruned.
[I 2021-05-10 08:08:32,137] Trial 3739 pruned.
[I 2021-05-10 08:08:32,713] Trial 3740 pruned.
[I 2021-05-10 08:08:33,266] Trial 3741 pruned.
[I 2021-05-10 08:08:33,841] Trial 3742 pruned.
[I 2021-05-10 08:08:34,416] Trial 3743 pruned.
[I 2021-05-10 08:08:34,980] Trial 3744 pruned.
[I 2021-05-10 08:08:35,553] Trial 3745 pruned.
[I 2021-05-10 08:08:36,125] Trial 3746 pruned.
[I 2021-05-10 08:08:36,696] Trial 3747 pruned.
[I 2021-05-10 08:08:37,275] Trial 3748 pruned.
[I 2021-05-10 08:08:37,850] Trial 3749 pruned.
[I 2021-05-10 08:08:38,420] Trial 3750 pruned.
[I 2021-05-10 08:08:38,988] Trial 3751 pruned.
[I 2021-05-10 08:08:39,562] Trial 3752 pruned.
[I 2021-05-10 08:08:40,127] Trial 3753 pruned.
[I 2021-05-10 08:08:40,705] Trial 3754 pruned.
[I 2021-05-10 08:08:40,901] Trial 3755 pruned.
[I 2021-05-10 08:08:41,467] Trial 3756 pruned.
[I 2021-05-10 08:08:42,039] Trial 3757 pruned.
[I 2021-05-10 08:08:42,613] Trial 3758 pruned.
[I 2021-05-10 08:08:43,183] Trial 3759 pruned.
[I 2021-05-10 08:08:43,763] Trial 3760 pruned.
[I 2021-05-10 08:08:44,338] Trial 3761 pruned.
[I 2021-05-10 08:08:44,906] Trial 3762 pruned.
[I 2021-05-10 08:08:45,482] Trial 3763 pruned.
[I 2021-05-10 08:08:46,055] Trial 3764 pruned.
[I 2021-05-10 08:08:46,635] Trial 3765 pruned.
[I 2021-05-10 08:08:46,775] Trial 3766 pruned.
[I 2021-05-10 08:08:47,352] Trial 3767 pruned.
[I 2021-05-10 08:08:47,909] Trial 3768 pruned.
[I 2021-05-10 08:08:48,487] Trial 3769 pruned.
[I 2021-05-10 08:08:49,065] Trial 3770 pruned.
[I 2021-05-10 08:08:49,631] Trial 3771 pruned.
[I 2021-05-10 08:08:50,195] Trial 3772 pruned.
[I 2021-05-10 08:08:50,768] Trial 3773 pruned.
[I 2021-05-10 08:08:51,087] Trial 3774 pruned.
[I 2021-05-10 08:08:51,662] Trial 3775 pruned.
[I 2021-05-10 08:08:52,239] Trial 3776 pruned.
[I 2021-05-10 08:08:52,812] Trial 3777 pruned.
[I 2021-05-10 08:08:53,394] Trial 3778 pruned.
[I 2021-05-10 08:08:53,976] Trial 3779 pruned.
[I 2021-05-10 08:08:54,546] Trial 3780 pruned.
[I 2021-05-10 08:08:55,121] Trial 3781 pruned.
[I 2021-05-10 08:08:55,700] Trial 3782 pruned.
[I 2021-05-10 08:08:56,267] Trial 3783 pruned.
[I 2021-05-10 08:08:56,464] Trial 3784 pruned.
[I 2021-05-10 08:08:57,041] Trial 3785 pruned.
[I 2021-05-10 08:08:57,607] Trial 3786 pruned.
[I 2021-05-10 08:08:58,183] Trial 3787 pruned.
[I 2021-05-10 08:08:58,760] Trial 3788 pruned.
[I 2021-05-10 08:08:59,335] Trial 3789 pruned.
[I 2021-05-10 08:08:59,914] Trial 3790 pruned.
[I 2021-05-10 08:09:00,493] Trial 3791 pruned.
[I 2021-05-10 08:09:01,068] Trial 3792 pruned.
[I 2021-05-10 08:09:01,653] Trial 3793 pruned.
[I 2021-05-10 08:09:02,229] Trial 3794 pruned.
[I 2021-05-10 08:09:02,360] Trial 3795 pruned.
[I 2021-05-10 08:09:02,941] Trial 3796 pruned.
[I 2021-05-10 08:09:03,508] Trial 3797 pruned.
[I 2021-05-10 08:09:03,828] Trial 3798 pruned.
[I 2021-05-10 08:09:04,404] Trial 3799 pruned.
[I 2021-05-10 08:09:04,983] Trial 3800 pruned.
[I 2021-05-10 08:09:05,547] Trial 3801 pruned.
[I 2021-05-10 08:09:06,119] Trial 3802 pruned.
[I 2021-05-10 08:09:06,700] Trial 3803 pruned.
[I 2021-05-10 08:09:07,272] Trial 3804 pruned.
[I 2021-05-10 08:09:07,848] Trial 3805 pruned.
[I 2021-05-10 08:09:08,425] Trial 3806 pruned.
[I 2021-05-10 08:09:08,995] Trial 3807 pruned.
[I 2021-05-10 08:09:09,578] Trial 3808 pruned.
[I 2021-05-10 08:09:10,155] Trial 3809 pruned.
[I 2021-05-10 08:09:10,728] Trial 3810 pruned.
[I 2021-05-10 08:09:11,307] Trial 3811 pruned.
[I 2021-05-10 08:09:11,887] Trial 3812 pruned.
[I 2021-05-10 08:09:12,082] Trial 3813 pruned.
[I 2021-05-10 08:09:12,656] Trial 3814 pruned.
[I 2021-05-10 08:09:13,234] Trial 3815 pruned.
[I 2021-05-10 08:09:13,806] Trial 3816 pruned.
[I 2021-05-10 08:09:14,385] Trial 3817 pruned.
[I 2021-05-10 08:09:14,961] Trial 3818 pruned.
[I 2021-05-10 08:09:15,528] Trial 3819 pruned.
[I 2021-05-10 08:09:16,106] Trial 3820 pruned.
[I 2021-05-10 08:09:16,679] Trial 3821 pruned.
[I 2021-05-10 08:09:17,239] Trial 3822 pruned.
[I 2021-05-10 08:09:17,825] Trial 3823 pruned.
[I 2021-05-10 08:09:17,965] Trial 3824 pruned.
[I 2021-05-10 08:09:18,537] Trial 3825 pruned.
[I 2021-05-10 08:09:19,117] Trial 3826 pruned.
[I 2021-05-10 08:09:19,690] Trial 3827 pruned.
[I 2021-05-10 08:09:20,011] Trial 3828 pruned.
[I 2021-05-10 08:09:20,575] Trial 3829 pruned.
[I 2021-05-10 08:09:21,152] Trial 3830 pruned.
[I 2021-05-10 08:09:21,724] Trial 3831 pruned.
[I 2021-05-10 08:09:22,306] Trial 3832 pruned.
[I 2021-05-10 08:09:22,880] Trial 3833 pruned.
[I 2021-05-10 08:09:23,457] Trial 3834 pruned.
[I 2021-05-10 08:09:24,038] Trial 3835 pruned.
[I 2021-05-10 08:09:24,611] Trial 3836 pruned.
[I 2021-05-10 08:09:25,180] Trial 3837 pruned.
[I 2021-05-10 08:09:25,755] Trial 3838 pruned.
[I 2021-05-10 08:09:26,334] Trial 3839 pruned.
[I 2021-05-10 08:09:26,907] Trial 3840 pruned.
[I 2021-05-10 08:09:27,484] Trial 3841 pruned.
[I 2021-05-10 08:09:29,082] Trial 3842 pruned.
[I 2021-05-10 08:09:29,656] Trial 3843 pruned.
[I 2021-05-10 08:09:29,854] Trial 3844 pruned.
[I 2021-05-10 08:09:30,431] Trial 3845 pruned.
[I 2021-05-10 08:09:31,004] Trial 3846 pruned.
[I 2021-05-10 08:09:31,583] Trial 3847 pruned.
[I 2021-05-10 08:09:32,158] Trial 3848 pruned.
[I 2021-05-10 08:09:32,721] Trial 3849 pruned.
[I 2021-05-10 08:09:33,299] Trial 3850 pruned.
[I 2021-05-10 08:09:33,865] Trial 3851 pruned.
[I 2021-05-10 08:09:34,436] Trial 3852 pruned.
[I 2021-05-10 08:09:35,012] Trial 3853 pruned.
[I 2021-05-10 08:09:35,147] Trial 3854 pruned.
[I 2021-05-10 08:09:35,735] Trial 3855 pruned.
[I 2021-05-10 08:09:36,312] Trial 3856 pruned.
[I 2021-05-10 08:09:36,881] Trial 3857 pruned.
[I 2021-05-10 08:09:37,202] Trial 3858 pruned.
[I 2021-05-10 08:10:28,492] Trial 3859 finished with value: 479.3067626953125 and parameters: {'lr': 0.006150490058651818, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 08:10:29,066] Trial 3860 pruned.
[I 2021-05-10 08:10:29,638] Trial 3861 pruned.
[I 2021-05-10 08:10:30,219] Trial 3862 pruned.
[I 2021-05-10 08:10:30,798] Trial 3863 pruned.
[I 2021-05-10 08:10:31,371] Trial 3864 pruned.
[I 2021-05-10 08:10:31,950] Trial 3865 pruned.
[I 2021-05-10 08:10:32,533] Trial 3866 pruned.
[I 2021-05-10 08:10:33,104] Trial 3867 pruned.
[I 2021-05-10 08:10:33,691] Trial 3868 pruned.
[I 2021-05-10 08:10:34,272] Trial 3869 pruned.
[I 2021-05-10 08:10:34,846] Trial 3870 pruned.
[I 2021-05-10 08:10:35,047] Trial 3871 pruned.
[I 2021-05-10 08:10:35,625] Trial 3872 pruned.
[I 2021-05-10 08:10:36,193] Trial 3873 pruned.
[I 2021-05-10 08:10:36,773] Trial 3874 pruned.
[I 2021-05-10 08:10:37,352] Trial 3875 pruned.
[I 2021-05-10 08:10:37,907] Trial 3876 pruned.
[I 2021-05-10 08:10:38,485] Trial 3877 pruned.
[I 2021-05-10 08:10:39,058] Trial 3878 pruned.
[I 2021-05-10 08:10:39,633] Trial 3879 pruned.
[I 2021-05-10 08:10:40,212] Trial 3880 pruned.
[I 2021-05-10 08:10:40,791] Trial 3881 pruned.
[I 2021-05-10 08:10:40,927] Trial 3882 pruned.
[I 2021-05-10 08:10:41,496] Trial 3883 pruned.
[I 2021-05-10 08:10:42,074] Trial 3884 pruned.
[I 2021-05-10 08:10:42,647] Trial 3885 pruned.
[I 2021-05-10 08:10:42,974] Trial 3886 pruned.
[I 2021-05-10 08:10:43,552] Trial 3887 pruned.
[I 2021-05-10 08:10:44,126] Trial 3888 pruned.
[I 2021-05-10 08:10:44,708] Trial 3889 pruned.
[I 2021-05-10 08:10:45,287] Trial 3890 pruned.
[I 2021-05-10 08:10:45,859] Trial 3891 pruned.
[I 2021-05-10 08:10:46,447] Trial 3892 pruned.
[I 2021-05-10 08:10:47,028] Trial 3893 pruned.
[I 2021-05-10 08:10:47,602] Trial 3894 pruned.
[I 2021-05-10 08:10:48,183] Trial 3895 pruned.
[I 2021-05-10 08:10:48,764] Trial 3896 pruned.
[I 2021-05-10 08:10:49,333] Trial 3897 pruned.
[I 2021-05-10 08:10:49,916] Trial 3898 pruned.
[I 2021-05-10 08:10:50,493] Trial 3899 pruned.
[I 2021-05-10 08:10:50,691] Trial 3900 pruned.
[I 2021-05-10 08:10:51,270] Trial 3901 pruned.
[I 2021-05-10 08:10:51,848] Trial 3902 pruned.
[I 2021-05-10 08:10:52,410] Trial 3903 pruned.
[I 2021-05-10 08:10:52,999] Trial 3904 pruned.
[I 2021-05-10 08:10:53,580] Trial 3905 pruned.
[I 2021-05-10 08:10:54,143] Trial 3906 pruned.
[I 2021-05-10 08:10:54,724] Trial 3907 pruned.
[I 2021-05-10 08:10:55,309] Trial 3908 pruned.
[I 2021-05-10 08:10:55,875] Trial 3909 pruned.
[I 2021-05-10 08:10:56,453] Trial 3910 pruned.
[I 2021-05-10 08:10:57,033] Trial 3911 pruned.
[I 2021-05-10 08:10:57,170] Trial 3912 pruned.
[I 2021-05-10 08:10:57,746] Trial 3913 pruned.
[I 2021-05-10 08:10:58,324] Trial 3914 pruned.
[I 2021-05-10 08:10:58,643] Trial 3915 pruned.
[I 2021-05-10 08:10:59,222] Trial 3916 pruned.
[I 2021-05-10 08:10:59,803] Trial 3917 pruned.
[I 2021-05-10 08:11:00,370] Trial 3918 pruned.
[I 2021-05-10 08:11:00,948] Trial 3919 pruned.
[I 2021-05-10 08:11:01,525] Trial 3920 pruned.
[I 2021-05-10 08:11:02,099] Trial 3921 pruned.
[I 2021-05-10 08:11:02,681] Trial 3922 pruned.
[I 2021-05-10 08:11:03,259] Trial 3923 pruned.
[I 2021-05-10 08:11:03,833] Trial 3924 pruned.
[I 2021-05-10 08:11:04,412] Trial 3925 pruned.
[I 2021-05-10 08:11:04,987] Trial 3926 pruned.
[I 2021-05-10 08:11:05,555] Trial 3927 pruned.
[I 2021-05-10 08:11:06,136] Trial 3928 pruned.
[I 2021-05-10 08:11:06,721] Trial 3929 pruned.
[I 2021-05-10 08:11:06,921] Trial 3930 pruned.
[I 2021-05-10 08:11:07,487] Trial 3931 pruned.
[I 2021-05-10 08:11:08,058] Trial 3932 pruned.
[I 2021-05-10 08:11:58,642] Trial 3933 finished with value: 395.08050537109375 and parameters: {'lr': 0.005161851878288235, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 08:11:59,223] Trial 3934 pruned.
[I 2021-05-10 08:11:59,805] Trial 3935 pruned.
[I 2021-05-10 08:12:00,379] Trial 3936 pruned.
[I 2021-05-10 08:12:00,954] Trial 3937 pruned.
[I 2021-05-10 08:12:01,522] Trial 3938 pruned.
[I 2021-05-10 08:12:02,088] Trial 3939 pruned.
[I 2021-05-10 08:12:02,671] Trial 3940 pruned.
[I 2021-05-10 08:12:03,255] Trial 3941 pruned.
[I 2021-05-10 08:12:03,831] Trial 3942 pruned.
[I 2021-05-10 08:12:03,970] Trial 3943 pruned.
[I 2021-05-10 08:12:04,299] Trial 3944 pruned.
[I 2021-05-10 08:12:04,874] Trial 3945 pruned.
[I 2021-05-10 08:12:05,451] Trial 3946 pruned.
[I 2021-05-10 08:12:06,031] Trial 3947 pruned.
[I 2021-05-10 08:12:06,597] Trial 3948 pruned.
[I 2021-05-10 08:12:07,176] Trial 3949 pruned.
[I 2021-05-10 08:12:07,763] Trial 3950 pruned.
[I 2021-05-10 08:12:09,342] Trial 3951 pruned.
[I 2021-05-10 08:12:09,926] Trial 3952 pruned.
[I 2021-05-10 08:12:10,505] Trial 3953 pruned.
[I 2021-05-10 08:12:11,077] Trial 3954 pruned.
[I 2021-05-10 08:12:11,655] Trial 3955 pruned.
[I 2021-05-10 08:12:12,235] Trial 3956 pruned.
[I 2021-05-10 08:12:12,802] Trial 3957 pruned.
[I 2021-05-10 08:12:13,383] Trial 3958 pruned.
[I 2021-05-10 08:12:13,584] Trial 3959 pruned.
[I 2021-05-10 08:12:14,143] Trial 3960 pruned.
[I 2021-05-10 08:12:14,723] Trial 3961 pruned.
[I 2021-05-10 08:12:15,303] Trial 3962 pruned.
[I 2021-05-10 08:12:15,874] Trial 3963 pruned.
[I 2021-05-10 08:12:16,448] Trial 3964 pruned.
[I 2021-05-10 08:12:17,031] Trial 3965 pruned.
[I 2021-05-10 08:12:17,598] Trial 3966 pruned.
[I 2021-05-10 08:12:18,179] Trial 3967 pruned.
[I 2021-05-10 08:12:18,756] Trial 3968 pruned.
[I 2021-05-10 08:12:19,327] Trial 3969 pruned.
[I 2021-05-10 08:12:19,467] Trial 3970 pruned.
[I 2021-05-10 08:13:10,860] Trial 3971 finished with value: 381.2845764160156 and parameters: {'lr': 0.004536227533790192, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 08:13:11,183] Trial 3972 pruned.
[I 2021-05-10 08:13:11,763] Trial 3973 pruned.
[I 2021-05-10 08:13:12,344] Trial 3974 pruned.
[I 2021-05-10 08:13:12,919] Trial 3975 pruned.
[I 2021-05-10 08:13:13,502] Trial 3976 pruned.
[I 2021-05-10 08:13:14,084] Trial 3977 pruned.
[I 2021-05-10 08:13:14,652] Trial 3978 pruned.
[I 2021-05-10 08:13:15,232] Trial 3979 pruned.
[I 2021-05-10 08:13:15,813] Trial 3980 pruned.
[I 2021-05-10 08:13:16,382] Trial 3981 pruned.
[I 2021-05-10 08:13:16,963] Trial 3982 pruned.
[I 2021-05-10 08:13:17,542] Trial 3983 pruned.
[I 2021-05-10 08:13:18,112] Trial 3984 pruned.
[I 2021-05-10 08:13:19,694] Trial 3985 pruned.
[I 2021-05-10 08:13:20,265] Trial 3986 pruned.
[I 2021-05-10 08:13:20,834] Trial 3987 pruned.
[I 2021-05-10 08:13:21,036] Trial 3988 pruned.
[I 2021-05-10 08:13:21,614] Trial 3989 pruned.
[I 2021-05-10 08:13:22,190] Trial 3990 pruned.
[I 2021-05-10 08:13:22,769] Trial 3991 pruned.
[I 2021-05-10 08:13:23,353] Trial 3992 pruned.
[I 2021-05-10 08:13:23,930] Trial 3993 pruned.
[I 2021-05-10 08:13:24,511] Trial 3994 pruned.
[I 2021-05-10 08:13:25,093] Trial 3995 pruned.
[I 2021-05-10 08:13:25,669] Trial 3996 pruned.
[I 2021-05-10 08:13:26,253] Trial 3997 pruned.
[I 2021-05-10 08:13:26,835] Trial 3998 pruned.
[I 2021-05-10 08:13:26,972] Trial 3999 pruned.
[I 2021-05-10 08:13:27,550] Trial 4000 pruned.
[I 2021-05-10 08:13:27,877] Trial 4001 pruned.
[I 2021-05-10 08:13:28,449] Trial 4002 pruned.
[I 2021-05-10 08:13:29,026] Trial 4003 pruned.
[I 2021-05-10 08:13:29,604] Trial 4004 pruned.
[I 2021-05-10 08:13:30,179] Trial 4005 pruned.
[I 2021-05-10 08:13:30,765] Trial 4006 pruned.
[I 2021-05-10 08:13:31,351] Trial 4007 pruned.
[I 2021-05-10 08:13:31,924] Trial 4008 pruned.
[I 2021-05-10 08:13:32,505] Trial 4009 pruned.
[I 2021-05-10 08:13:33,087] Trial 4010 pruned.
[I 2021-05-10 08:13:33,662] Trial 4011 pruned.
[I 2021-05-10 08:13:34,233] Trial 4012 pruned.
[I 2021-05-10 08:13:34,808] Trial 4013 pruned.
[I 2021-05-10 08:13:35,383] Trial 4014 pruned.
[I 2021-05-10 08:13:35,967] Trial 4015 pruned.
[I 2021-05-10 08:13:36,547] Trial 4016 pruned.
[I 2021-05-10 08:13:37,117] Trial 4017 pruned.
[I 2021-05-10 08:13:37,320] Trial 4018 pruned.
[I 2021-05-10 08:13:37,900] Trial 4019 pruned.
[I 2021-05-10 08:13:38,460] Trial 4020 pruned.
[I 2021-05-10 08:13:39,042] Trial 4021 pruned.
[I 2021-05-10 08:13:39,623] Trial 4022 pruned.
[I 2021-05-10 08:13:40,194] Trial 4023 pruned.
[I 2021-05-10 08:13:40,773] Trial 4024 pruned.
[I 2021-05-10 08:13:41,355] Trial 4025 pruned.
[I 2021-05-10 08:13:41,926] Trial 4026 pruned.
[I 2021-05-10 08:13:42,506] Trial 4027 pruned.
[I 2021-05-10 08:13:42,648] Trial 4028 pruned.
[I 2021-05-10 08:13:43,221] Trial 4029 pruned.
[I 2021-05-10 08:13:43,801] Trial 4030 pruned.
[I 2021-05-10 08:13:44,130] Trial 4031 pruned.
[I 2021-05-10 08:13:44,708] Trial 4032 pruned.
[I 2021-05-10 08:13:45,292] Trial 4033 pruned.
[I 2021-05-10 08:13:45,871] Trial 4034 pruned.
[I 2021-05-10 08:13:46,443] Trial 4035 pruned.
[I 2021-05-10 08:13:47,022] Trial 4036 pruned.
[I 2021-05-10 08:13:47,602] Trial 4037 pruned.
[I 2021-05-10 08:13:49,189] Trial 4038 pruned.
[I 2021-05-10 08:13:49,770] Trial 4039 pruned.
[I 2021-05-10 08:13:50,353] Trial 4040 pruned.
[I 2021-05-10 08:13:50,910] Trial 4041 pruned.
[I 2021-05-10 08:13:51,490] Trial 4042 pruned.
[I 2021-05-10 08:13:52,066] Trial 4043 pruned.
[I 2021-05-10 08:13:52,636] Trial 4044 pruned.
[I 2021-05-10 08:14:44,041] Trial 4045 finished with value: 377.07684326171875 and parameters: {'lr': 0.0035747179162953007, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 08:14:44,243] Trial 4046 pruned.
[I 2021-05-10 08:14:44,812] Trial 4047 pruned.
[I 2021-05-10 08:14:45,388] Trial 4048 pruned.
[I 2021-05-10 08:14:45,975] Trial 4049 pruned.
[I 2021-05-10 08:14:46,556] Trial 4050 pruned.
[I 2021-05-10 08:14:47,139] Trial 4051 pruned.
[I 2021-05-10 08:14:47,723] Trial 4052 pruned.
[I 2021-05-10 08:14:48,294] Trial 4053 pruned.
[I 2021-05-10 08:14:48,877] Trial 4054 pruned.
[I 2021-05-10 08:14:50,486] Trial 4055 pruned.
[I 2021-05-10 08:14:51,057] Trial 4056 pruned.
[I 2021-05-10 08:14:51,195] Trial 4057 pruned.
[I 2021-05-10 08:14:51,779] Trial 4058 pruned.
[I 2021-05-10 08:14:52,352] Trial 4059 pruned.
[I 2021-05-10 08:14:52,700] Trial 4060 pruned.
[I 2021-05-10 08:14:53,286] Trial 4061 pruned.
[I 2021-05-10 08:14:53,863] Trial 4062 pruned.
[I 2021-05-10 08:14:54,458] Trial 4063 pruned.
[I 2021-05-10 08:14:55,041] Trial 4064 pruned.
[I 2021-05-10 08:14:55,619] Trial 4065 pruned.
[I 2021-05-10 08:14:56,193] Trial 4066 pruned.
[I 2021-05-10 08:14:56,776] Trial 4067 pruned.
[I 2021-05-10 08:14:57,341] Trial 4068 pruned.
[I 2021-05-10 08:14:57,923] Trial 4069 pruned.
[I 2021-05-10 08:14:58,506] Trial 4070 pruned.
[I 2021-05-10 08:14:59,080] Trial 4071 pruned.
[I 2021-05-10 08:14:59,666] Trial 4072 pruned.
[I 2021-05-10 08:15:00,255] Trial 4073 pruned.
[I 2021-05-10 08:15:00,828] Trial 4074 pruned.
[I 2021-05-10 08:15:01,027] Trial 4075 pruned.
[I 2021-05-10 08:15:01,607] Trial 4076 pruned.
[I 2021-05-10 08:15:02,180] Trial 4077 pruned.
[I 2021-05-10 08:15:02,765] Trial 4078 pruned.
[I 2021-05-10 08:15:03,347] Trial 4079 pruned.
[I 2021-05-10 08:15:03,921] Trial 4080 pruned.
[I 2021-05-10 08:15:04,505] Trial 4081 pruned.
[I 2021-05-10 08:15:54,400] Trial 4082 pruned.
[I 2021-05-10 08:15:54,972] Trial 4083 pruned.
[I 2021-05-10 08:15:55,551] Trial 4084 pruned.
[I 2021-05-10 08:15:56,135] Trial 4085 pruned.
[I 2021-05-10 08:15:56,271] Trial 4086 pruned.
[I 2021-05-10 08:15:56,852] Trial 4087 pruned.
[I 2021-05-10 08:15:57,441] Trial 4088 pruned.
[I 2021-05-10 08:15:57,767] Trial 4089 pruned.
[I 2021-05-10 08:15:58,354] Trial 4090 pruned.
[I 2021-05-10 08:15:58,932] Trial 4091 pruned.
[I 2021-05-10 08:15:59,505] Trial 4092 pruned.
[I 2021-05-10 08:16:00,089] Trial 4093 pruned.
[I 2021-05-10 08:16:00,670] Trial 4094 pruned.
[I 2021-05-10 08:16:01,248] Trial 4095 pruned.
[I 2021-05-10 08:16:01,829] Trial 4096 pruned.
[I 2021-05-10 08:16:02,414] Trial 4097 pruned.
[I 2021-05-10 08:16:02,982] Trial 4098 pruned.
[I 2021-05-10 08:16:03,562] Trial 4099 pruned.
[I 2021-05-10 08:16:04,147] Trial 4100 pruned.
[I 2021-05-10 08:16:04,715] Trial 4101 pruned.
[I 2021-05-10 08:16:05,297] Trial 4102 pruned.
[I 2021-05-10 08:16:05,877] Trial 4103 pruned.
[I 2021-05-10 08:16:06,450] Trial 4104 pruned.
[I 2021-05-10 08:16:07,034] Trial 4105 pruned.
[I 2021-05-10 08:16:07,238] Trial 4106 pruned.
[I 2021-05-10 08:16:07,815] Trial 4107 pruned.
[I 2021-05-10 08:16:08,398] Trial 4108 pruned.
[I 2021-05-10 08:16:08,982] Trial 4109 pruned.
[I 2021-05-10 08:16:09,557] Trial 4110 pruned.
[I 2021-05-10 08:16:10,141] Trial 4111 pruned.
[I 2021-05-10 08:16:10,733] Trial 4112 pruned.
[I 2021-05-10 08:16:11,315] Trial 4113 pruned.
[I 2021-05-10 08:16:12,020] Trial 4114 pruned.
[I 2021-05-10 08:16:12,603] Trial 4115 pruned.
[I 2021-05-10 08:16:12,743] Trial 4116 pruned.
[I 2021-05-10 08:16:13,327] Trial 4117 pruned.
[I 2021-05-10 08:16:13,913] Trial 4118 pruned.
[I 2021-05-10 08:16:14,491] Trial 4119 pruned.
[I 2021-05-10 08:16:15,063] Trial 4120 pruned.
[I 2021-05-10 08:16:15,391] Trial 4121 pruned.
[I 2021-05-10 08:16:15,964] Trial 4122 pruned.
[I 2021-05-10 08:16:16,544] Trial 4123 pruned.
[I 2021-05-10 08:16:17,128] Trial 4124 pruned.
[I 2021-05-10 08:16:17,707] Trial 4125 pruned.
[I 2021-05-10 08:16:18,279] Trial 4126 pruned.
[I 2021-05-10 08:16:18,864] Trial 4127 pruned.
[I 2021-05-10 08:16:19,426] Trial 4128 pruned.
[I 2021-05-10 08:16:20,010] Trial 4129 pruned.
[I 2021-05-10 08:16:20,590] Trial 4130 pruned.
[I 2021-05-10 08:16:21,166] Trial 4131 pruned.
[I 2021-05-10 08:16:21,776] Trial 4132 pruned.
[I 2021-05-10 08:16:22,363] Trial 4133 pruned.
[I 2021-05-10 08:16:22,937] Trial 4134 pruned.
[I 2021-05-10 08:16:23,143] Trial 4135 pruned.
[I 2021-05-10 08:16:23,726] Trial 4136 pruned.
[I 2021-05-10 08:16:24,310] Trial 4137 pruned.
[I 2021-05-10 08:16:24,889] Trial 4138 pruned.
[I 2021-05-10 08:16:25,475] Trial 4139 pruned.
[I 2021-05-10 08:16:26,049] Trial 4140 pruned.
[I 2021-05-10 08:16:26,628] Trial 4141 pruned.
[I 2021-05-10 08:16:27,210] Trial 4142 pruned.
[I 2021-05-10 08:16:27,790] Trial 4143 pruned.
[I 2021-05-10 08:16:28,373] Trial 4144 pruned.
[I 2021-05-10 08:16:28,512] Trial 4145 pruned.
[I 2021-05-10 08:16:29,091] Trial 4146 pruned.
[I 2021-05-10 08:16:29,671] Trial 4147 pruned.
[I 2021-05-10 08:16:30,001] Trial 4148 pruned.
[I 2021-05-10 08:16:30,558] Trial 4149 pruned.
[I 2021-05-10 08:16:31,145] Trial 4150 pruned.
[I 2021-05-10 08:16:31,731] Trial 4151 pruned.
[I 2021-05-10 08:16:32,312] Trial 4152 pruned.
[I 2021-05-10 08:16:32,899] Trial 4153 pruned.
[I 2021-05-10 08:16:33,491] Trial 4154 pruned.
[I 2021-05-10 08:16:34,059] Trial 4155 pruned.
[I 2021-05-10 08:16:34,645] Trial 4156 pruned.
[I 2021-05-10 08:16:35,228] Trial 4157 pruned.
[I 2021-05-10 08:16:35,801] Trial 4158 pruned.
[I 2021-05-10 08:16:36,385] Trial 4159 pruned.
[I 2021-05-10 08:16:36,969] Trial 4160 pruned.
[I 2021-05-10 08:16:37,543] Trial 4161 pruned.
[I 2021-05-10 08:16:38,126] Trial 4162 pruned.
[I 2021-05-10 08:16:38,713] Trial 4163 pruned.
[I 2021-05-10 08:16:38,917] Trial 4164 pruned.
[I 2021-05-10 08:16:39,506] Trial 4165 pruned.
[I 2021-05-10 08:16:40,092] Trial 4166 pruned.
[I 2021-05-10 08:16:45,237] Trial 4167 pruned.
[I 2021-05-10 08:16:45,822] Trial 4168 pruned.
[I 2021-05-10 08:16:46,415] Trial 4169 pruned.
[I 2021-05-10 08:16:47,000] Trial 4170 pruned.
[I 2021-05-10 08:16:47,586] Trial 4171 pruned.
[I 2021-05-10 08:16:48,171] Trial 4172 pruned.
[I 2021-05-10 08:16:48,747] Trial 4173 pruned.
[I 2021-05-10 08:16:48,888] Trial 4174 pruned.
[I 2021-05-10 08:16:49,473] Trial 4175 pruned.
[I 2021-05-10 08:16:49,793] Trial 4176 pruned.
[I 2021-05-10 08:16:50,379] Trial 4177 pruned.
[I 2021-05-10 08:16:50,962] Trial 4178 pruned.
[I 2021-05-10 08:16:51,539] Trial 4179 pruned.
[I 2021-05-10 08:16:52,120] Trial 4180 pruned.
[I 2021-05-10 08:16:52,709] Trial 4181 pruned.
[I 2021-05-10 08:16:53,273] Trial 4182 pruned.
[I 2021-05-10 08:16:53,860] Trial 4183 pruned.
[I 2021-05-10 08:16:54,443] Trial 4184 pruned.
[I 2021-05-10 08:16:55,024] Trial 4185 pruned.
[I 2021-05-10 08:16:55,611] Trial 4186 pruned.
[I 2021-05-10 08:16:56,198] Trial 4187 pruned.
[I 2021-05-10 08:16:56,772] Trial 4188 pruned.
[I 2021-05-10 08:16:57,356] Trial 4189 pruned.
[I 2021-05-10 08:16:57,941] Trial 4190 pruned.
[I 2021-05-10 08:16:58,143] Trial 4191 pruned.
[I 2021-05-10 08:16:58,728] Trial 4192 pruned.
[I 2021-05-10 08:16:59,315] Trial 4193 pruned.
[I 2021-05-10 08:16:59,894] Trial 4194 pruned.
[I 2021-05-10 08:17:00,479] Trial 4195 pruned.
[I 2021-05-10 08:17:01,069] Trial 4196 pruned.
[I 2021-05-10 08:17:01,643] Trial 4197 pruned.
[I 2021-05-10 08:17:02,227] Trial 4198 pruned.
[I 2021-05-10 08:17:02,811] Trial 4199 pruned.
[I 2021-05-10 08:17:03,393] Trial 4200 pruned.
[I 2021-05-10 08:17:03,981] Trial 4201 pruned.
[I 2021-05-10 08:17:04,122] Trial 4202 pruned.
[I 2021-05-10 08:17:04,693] Trial 4203 pruned.
[I 2021-05-10 08:17:05,275] Trial 4204 pruned.
[I 2021-05-10 08:17:05,606] Trial 4205 pruned.
[I 2021-05-10 08:17:06,182] Trial 4206 pruned.
[I 2021-05-10 08:17:06,768] Trial 4207 pruned.
[I 2021-05-10 08:17:07,373] Trial 4208 pruned.
[I 2021-05-10 08:17:07,943] Trial 4209 pruned.
[I 2021-05-10 08:17:08,524] Trial 4210 pruned.
[I 2021-05-10 08:17:09,113] Trial 4211 pruned.
[I 2021-05-10 08:17:09,695] Trial 4212 pruned.
[I 2021-05-10 08:17:10,281] Trial 4213 pruned.
[I 2021-05-10 08:17:10,867] Trial 4214 pruned.
[I 2021-05-10 08:17:11,443] Trial 4215 pruned.
[I 2021-05-10 08:17:12,031] Trial 4216 pruned.
[I 2021-05-10 08:17:12,618] Trial 4217 pruned.
[I 2021-05-10 08:17:13,193] Trial 4218 pruned.
[I 2021-05-10 08:17:13,781] Trial 4219 pruned.
[I 2021-05-10 08:17:13,987] Trial 4220 pruned.
[I 2021-05-10 08:17:14,558] Trial 4221 pruned.
[I 2021-05-10 08:17:15,142] Trial 4222 pruned.
[I 2021-05-10 08:17:15,726] Trial 4223 pruned.
[I 2021-05-10 08:17:16,299] Trial 4224 pruned.
[I 2021-05-10 08:17:16,885] Trial 4225 pruned.
[I 2021-05-10 08:17:17,473] Trial 4226 pruned.
[I 2021-05-10 08:17:18,048] Trial 4227 pruned.
[I 2021-05-10 08:17:18,619] Trial 4228 pruned.
[I 2021-05-10 08:17:19,207] Trial 4229 pruned.
[I 2021-05-10 08:17:19,776] Trial 4230 pruned.
[I 2021-05-10 08:17:20,361] Trial 4231 pruned.
[I 2021-05-10 08:17:20,944] Trial 4232 pruned.
[I 2021-05-10 08:17:21,085] Trial 4233 pruned.
[I 2021-05-10 08:17:21,421] Trial 4234 pruned.
[I 2021-05-10 08:17:22,005] Trial 4235 pruned.
[I 2021-05-10 08:17:22,569] Trial 4236 pruned.
[I 2021-05-10 08:17:23,155] Trial 4237 pruned.
[I 2021-05-10 08:17:23,740] Trial 4238 pruned.
[I 2021-05-10 08:17:24,315] Trial 4239 pruned.
[I 2021-05-10 08:17:24,900] Trial 4240 pruned.
[I 2021-05-10 08:17:25,482] Trial 4241 pruned.
[I 2021-05-10 08:17:26,054] Trial 4242 pruned.
[I 2021-05-10 08:17:26,637] Trial 4243 pruned.
[I 2021-05-10 08:17:27,222] Trial 4244 pruned.
[I 2021-05-10 08:17:27,796] Trial 4245 pruned.
[I 2021-05-10 08:17:28,383] Trial 4246 pruned.
[I 2021-05-10 08:18:19,803] Trial 4247 finished with value: 371.6463623046875 and parameters: {'lr': 0.00527877958964501, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 08:18:20,386] Trial 4248 pruned.
[I 2021-05-10 08:18:20,973] Trial 4249 pruned.
[I 2021-05-10 08:18:21,554] Trial 4250 pruned.
[I 2021-05-10 08:18:22,135] Trial 4251 pruned.
[I 2021-05-10 08:18:22,346] Trial 4252 pruned.
[I 2021-05-10 08:18:22,931] Trial 4253 pruned.
[I 2021-05-10 08:18:23,514] Trial 4254 pruned.
[I 2021-05-10 08:18:24,096] Trial 4255 pruned.
[I 2021-05-10 08:18:24,685] Trial 4256 pruned.
[I 2021-05-10 08:19:16,122] Trial 4257 finished with value: 391.7796325683594 and parameters: {'lr': 0.004914801146546865, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 08:19:16,709] Trial 4258 pruned.
[I 2021-05-10 08:19:17,291] Trial 4259 pruned.
[I 2021-05-10 08:19:17,872] Trial 4260 pruned.
[I 2021-05-10 08:19:18,460] Trial 4261 pruned.
[I 2021-05-10 08:19:18,606] Trial 4262 pruned.
[I 2021-05-10 08:19:19,194] Trial 4263 pruned.
[I 2021-05-10 08:19:19,773] Trial 4264 pruned.
[I 2021-05-10 08:19:20,108] Trial 4265 pruned.
[I 2021-05-10 08:19:20,697] Trial 4266 pruned.
[I 2021-05-10 08:19:21,287] Trial 4267 pruned.
[I 2021-05-10 08:19:21,875] Trial 4268 pruned.
[I 2021-05-10 08:19:22,459] Trial 4269 pruned.
[I 2021-05-10 08:19:23,048] Trial 4270 pruned.
[I 2021-05-10 08:19:23,632] Trial 4271 pruned.
[I 2021-05-10 08:19:24,223] Trial 4272 pruned.
[I 2021-05-10 08:19:24,809] Trial 4273 pruned.
[I 2021-05-10 08:19:25,393] Trial 4274 pruned.
[I 2021-05-10 08:19:25,980] Trial 4275 pruned.
[I 2021-05-10 08:20:17,343] Trial 4276 finished with value: 369.0662536621094 and parameters: {'lr': 0.004618089398316016, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 08:20:17,929] Trial 4277 pruned.
[I 2021-05-10 08:20:18,516] Trial 4278 pruned.
[I 2021-05-10 08:20:19,103] Trial 4279 pruned.
[I 2021-05-10 08:20:19,311] Trial 4280 pruned.
[I 2021-05-10 08:20:19,898] Trial 4281 pruned.
[I 2021-05-10 08:20:20,482] Trial 4282 pruned.
[I 2021-05-10 08:20:21,070] Trial 4283 pruned.
[I 2021-05-10 08:20:21,648] Trial 4284 pruned.
[I 2021-05-10 08:20:22,232] Trial 4285 pruned.
[I 2021-05-10 08:20:22,806] Trial 4286 pruned.
[I 2021-05-10 08:20:23,398] Trial 4287 pruned.
[I 2021-05-10 08:20:23,986] Trial 4288 pruned.
[I 2021-05-10 08:20:24,569] Trial 4289 pruned.
[I 2021-05-10 08:20:25,152] Trial 4290 pruned.
[I 2021-05-10 08:20:25,739] Trial 4291 pruned.
[I 2021-05-10 08:20:25,881] Trial 4292 pruned.
[I 2021-05-10 08:20:26,465] Trial 4293 pruned.
[I 2021-05-10 08:20:26,800] Trial 4294 pruned.
[I 2021-05-10 08:20:27,389] Trial 4295 pruned.
[I 2021-05-10 08:20:27,971] Trial 4296 pruned.
[I 2021-05-10 08:20:28,554] Trial 4297 pruned.
[I 2021-05-10 08:20:29,138] Trial 4298 pruned.
[I 2021-05-10 08:20:29,727] Trial 4299 pruned.
[I 2021-05-10 08:20:30,313] Trial 4300 pruned.
[I 2021-05-10 08:20:30,894] Trial 4301 pruned.
[I 2021-05-10 08:20:31,486] Trial 4302 pruned.
[I 2021-05-10 08:20:32,070] Trial 4303 pruned.
[I 2021-05-10 08:20:32,670] Trial 4304 pruned.
[I 2021-05-10 08:20:33,253] Trial 4305 pruned.
[I 2021-05-10 08:20:33,838] Trial 4306 pruned.
[I 2021-05-10 08:20:34,424] Trial 4307 pruned.
[I 2021-05-10 08:20:35,008] Trial 4308 pruned.
[I 2021-05-10 08:20:35,592] Trial 4309 pruned.
[I 2021-05-10 08:20:36,179] Trial 4310 pruned.
[I 2021-05-10 08:20:36,386] Trial 4311 pruned.
[I 2021-05-10 08:20:36,972] Trial 4312 pruned.
[I 2021-05-10 08:20:37,549] Trial 4313 pruned.
[I 2021-05-10 08:20:38,136] Trial 4314 pruned.
[I 2021-05-10 08:20:38,724] Trial 4315 pruned.
[I 2021-05-10 08:20:39,309] Trial 4316 pruned.
[I 2021-05-10 08:20:39,893] Trial 4317 pruned.
[I 2021-05-10 08:20:40,468] Trial 4318 pruned.
[I 2021-05-10 08:20:41,055] Trial 4319 pruned.
[I 2021-05-10 08:20:41,641] Trial 4320 pruned.
[I 2021-05-10 08:20:42,226] Trial 4321 pruned.
[I 2021-05-10 08:20:42,370] Trial 4322 pruned.
[I 2021-05-10 08:20:42,705] Trial 4323 pruned.
[I 2021-05-10 08:20:43,294] Trial 4324 pruned.
[I 2021-05-10 08:20:43,883] Trial 4325 pruned.
[I 2021-05-10 08:20:44,469] Trial 4326 pruned.
[I 2021-05-10 08:20:45,057] Trial 4327 pruned.
[I 2021-05-10 08:20:45,640] Trial 4328 pruned.
[I 2021-05-10 08:20:46,230] Trial 4329 pruned.
[I 2021-05-10 08:20:46,825] Trial 4330 pruned.
[I 2021-05-10 08:20:47,418] Trial 4331 pruned.
[I 2021-05-10 08:20:48,008] Trial 4332 pruned.
[I 2021-05-10 08:20:48,593] Trial 4333 pruned.
[I 2021-05-10 08:20:49,180] Trial 4334 pruned.
[I 2021-05-10 08:20:49,767] Trial 4335 pruned.
[I 2021-05-10 08:20:50,353] Trial 4336 pruned.
[I 2021-05-10 08:20:50,939] Trial 4337 pruned.
[I 2021-05-10 08:20:51,147] Trial 4338 pruned.
[I 2021-05-10 08:20:51,743] Trial 4339 pruned.
[I 2021-05-10 08:20:52,319] Trial 4340 pruned.
[I 2021-05-10 08:20:52,911] Trial 4341 pruned.
[I 2021-05-10 08:20:53,493] Trial 4342 pruned.
[I 2021-05-10 08:20:54,082] Trial 4343 pruned.
[I 2021-05-10 08:20:54,672] Trial 4344 pruned.
[I 2021-05-10 08:20:55,257] Trial 4345 pruned.
[I 2021-05-10 08:20:55,845] Trial 4346 pruned.
[I 2021-05-10 08:20:56,435] Trial 4347 pruned.
[I 2021-05-10 08:20:57,023] Trial 4348 pruned.
[I 2021-05-10 08:20:57,169] Trial 4349 pruned.
[I 2021-05-10 08:20:57,759] Trial 4350 pruned.
[I 2021-05-10 08:20:58,346] Trial 4351 pruned.
[I 2021-05-10 08:20:58,934] Trial 4352 pruned.
[I 2021-05-10 08:20:59,273] Trial 4353 pruned.
[I 2021-05-10 08:20:59,862] Trial 4354 pruned.
[I 2021-05-10 08:21:00,455] Trial 4355 pruned.
[I 2021-05-10 08:21:01,042] Trial 4356 pruned.
[I 2021-05-10 08:21:01,629] Trial 4357 pruned.
[I 2021-05-10 08:21:03,248] Trial 4358 pruned.
[I 2021-05-10 08:21:03,837] Trial 4359 pruned.
[I 2021-05-10 08:21:04,425] Trial 4360 pruned.
[I 2021-05-10 08:21:05,012] Trial 4361 pruned.
[I 2021-05-10 08:21:05,597] Trial 4362 pruned.
[I 2021-05-10 08:21:06,185] Trial 4363 pruned.
[I 2021-05-10 08:21:06,773] Trial 4364 pruned.
[I 2021-05-10 08:21:07,350] Trial 4365 pruned.
[I 2021-05-10 08:21:07,936] Trial 4366 pruned.
[I 2021-05-10 08:21:08,144] Trial 4367 pruned.
[I 2021-05-10 08:21:08,732] Trial 4368 pruned.
[I 2021-05-10 08:22:00,084] Trial 4369 finished with value: 375.1161193847656 and parameters: {'lr': 0.004114895373698678, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 08:22:00,671] Trial 4370 pruned.
[I 2021-05-10 08:22:01,259] Trial 4371 pruned.
[I 2021-05-10 08:22:01,833] Trial 4372 pruned.
[I 2021-05-10 08:22:02,422] Trial 4373 pruned.
[I 2021-05-10 08:22:03,008] Trial 4374 pruned.
[I 2021-05-10 08:22:03,594] Trial 4375 pruned.
[I 2021-05-10 08:22:04,183] Trial 4376 pruned.
[I 2021-05-10 08:22:04,777] Trial 4377 pruned.
[I 2021-05-10 08:22:05,366] Trial 4378 pruned.
[I 2021-05-10 08:22:05,953] Trial 4379 pruned.
[I 2021-05-10 08:22:06,543] Trial 4380 pruned.
[I 2021-05-10 08:22:07,132] Trial 4381 pruned.
[I 2021-05-10 08:22:07,279] Trial 4382 pruned.
[I 2021-05-10 08:22:07,865] Trial 4383 pruned.
[I 2021-05-10 08:22:08,451] Trial 4384 pruned.
[I 2021-05-10 08:22:08,787] Trial 4385 pruned.
[I 2021-05-10 08:22:09,374] Trial 4386 pruned.
[I 2021-05-10 08:22:09,958] Trial 4387 pruned.
[I 2021-05-10 08:22:10,548] Trial 4388 pruned.
[I 2021-05-10 08:22:11,139] Trial 4389 pruned.
[I 2021-05-10 08:22:11,728] Trial 4390 pruned.
[I 2021-05-10 08:22:12,315] Trial 4391 pruned.
[I 2021-05-10 08:22:12,896] Trial 4392 pruned.
[I 2021-05-10 08:22:13,480] Trial 4393 pruned.
[I 2021-05-10 08:22:14,058] Trial 4394 pruned.
[I 2021-05-10 08:22:14,642] Trial 4395 pruned.
[I 2021-05-10 08:22:14,852] Trial 4396 pruned.
[I 2021-05-10 08:22:15,442] Trial 4397 pruned.
[I 2021-05-10 08:22:16,027] Trial 4398 pruned.
[I 2021-05-10 08:22:16,614] Trial 4399 pruned.
[I 2021-05-10 08:22:17,198] Trial 4400 pruned.
[I 2021-05-10 08:22:17,791] Trial 4401 pruned.
[I 2021-05-10 08:22:18,370] Trial 4402 pruned.
[I 2021-05-10 08:22:18,958] Trial 4403 pruned.
[I 2021-05-10 08:22:19,544] Trial 4404 pruned.
[I 2021-05-10 08:22:20,132] Trial 4405 pruned.
[I 2021-05-10 08:22:20,727] Trial 4406 pruned.
[I 2021-05-10 08:22:20,873] Trial 4407 pruned.
[I 2021-05-10 08:22:21,464] Trial 4408 pruned.
[I 2021-05-10 08:22:22,050] Trial 4409 pruned.
[I 2021-05-10 08:22:22,634] Trial 4410 pruned.
[I 2021-05-10 08:22:22,968] Trial 4411 pruned.
[I 2021-05-10 08:22:23,550] Trial 4412 pruned.
[I 2021-05-10 08:22:24,140] Trial 4413 pruned.
[I 2021-05-10 08:22:24,729] Trial 4414 pruned.
[I 2021-05-10 08:22:25,321] Trial 4415 pruned.
[I 2021-05-10 08:22:25,909] Trial 4416 pruned.
[I 2021-05-10 08:22:26,495] Trial 4417 pruned.
[I 2021-05-10 08:22:27,083] Trial 4418 pruned.
[I 2021-05-10 08:22:27,662] Trial 4419 pruned.
[I 2021-05-10 08:22:28,254] Trial 4420 pruned.
[I 2021-05-10 08:22:28,844] Trial 4421 pruned.
[I 2021-05-10 08:22:29,432] Trial 4422 pruned.
[I 2021-05-10 08:22:30,006] Trial 4423 pruned.
[I 2021-05-10 08:22:30,596] Trial 4424 pruned.
[I 2021-05-10 08:22:31,187] Trial 4425 pruned.
[I 2021-05-10 08:22:31,776] Trial 4426 pruned.
[I 2021-05-10 08:22:31,988] Trial 4427 pruned.
[I 2021-05-10 08:22:32,567] Trial 4428 pruned.
[I 2021-05-10 08:22:33,158] Trial 4429 pruned.
[I 2021-05-10 08:22:33,751] Trial 4430 pruned.
[I 2021-05-10 08:22:34,340] Trial 4431 pruned.
[I 2021-05-10 08:22:34,930] Trial 4432 pruned.
[I 2021-05-10 08:22:35,522] Trial 4433 pruned.
[I 2021-05-10 08:22:36,107] Trial 4434 pruned.
[I 2021-05-10 08:22:36,699] Trial 4435 pruned.
[I 2021-05-10 08:22:36,845] Trial 4436 pruned.
[I 2021-05-10 08:22:37,433] Trial 4437 pruned.
[I 2021-05-10 08:22:38,020] Trial 4438 pruned.
[I 2021-05-10 08:22:38,605] Trial 4439 pruned.
[I 2021-05-10 08:22:38,944] Trial 4440 pruned.
[I 2021-05-10 08:22:39,531] Trial 4441 pruned.
[I 2021-05-10 08:22:40,121] Trial 4442 pruned.
[I 2021-05-10 08:22:40,712] Trial 4443 pruned.
[I 2021-05-10 08:22:41,300] Trial 4444 pruned.
[I 2021-05-10 08:22:41,892] Trial 4445 pruned.
[I 2021-05-10 08:22:42,478] Trial 4446 pruned.
[I 2021-05-10 08:22:43,069] Trial 4447 pruned.
[I 2021-05-10 08:22:43,656] Trial 4448 pruned.
[I 2021-05-10 08:22:44,246] Trial 4449 pruned.
[I 2021-05-10 08:22:44,843] Trial 4450 pruned.
[I 2021-05-10 08:22:45,436] Trial 4451 pruned.
[I 2021-05-10 08:22:46,026] Trial 4452 pruned.
[I 2021-05-10 08:22:46,613] Trial 4453 pruned.
[I 2021-05-10 08:22:46,824] Trial 4454 pruned.
[I 2021-05-10 08:22:47,417] Trial 4455 pruned.
[I 2021-05-10 08:22:48,005] Trial 4456 pruned.
[I 2021-05-10 08:22:48,593] Trial 4457 pruned.
[I 2021-05-10 08:22:49,186] Trial 4458 pruned.
[I 2021-05-10 08:22:49,777] Trial 4459 pruned.
[I 2021-05-10 08:22:50,371] Trial 4460 pruned.
[I 2021-05-10 08:22:50,959] Trial 4461 pruned.
[I 2021-05-10 08:23:42,494] Trial 4462 finished with value: 392.1061096191406 and parameters: {'lr': 0.0050060857413643765, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 08:23:43,085] Trial 4463 pruned.
[I 2021-05-10 08:23:43,670] Trial 4464 pruned.
[I 2021-05-10 08:23:44,261] Trial 4465 pruned.
[I 2021-05-10 08:23:44,860] Trial 4466 pruned.
[I 2021-05-10 08:23:45,009] Trial 4467 pruned.
[I 2021-05-10 08:23:45,343] Trial 4468 pruned.
[I 2021-05-10 08:23:45,932] Trial 4469 pruned.
[I 2021-05-10 08:23:46,520] Trial 4470 pruned.
[I 2021-05-10 08:23:47,109] Trial 4471 pruned.
[I 2021-05-10 08:23:47,701] Trial 4472 pruned.
[I 2021-05-10 08:23:48,284] Trial 4473 pruned.
[I 2021-05-10 08:24:38,241] Trial 4474 pruned.
[I 2021-05-10 08:24:38,842] Trial 4475 pruned.
[I 2021-05-10 08:24:39,428] Trial 4476 pruned.
[I 2021-05-10 08:24:40,017] Trial 4477 pruned.
[I 2021-05-10 08:24:40,603] Trial 4478 pruned.
[I 2021-05-10 08:24:41,189] Trial 4479 pruned.
[I 2021-05-10 08:24:41,779] Trial 4480 pruned.
[I 2021-05-10 08:24:42,367] Trial 4481 pruned.
[I 2021-05-10 08:24:42,960] Trial 4482 pruned.
[I 2021-05-10 08:24:43,172] Trial 4483 pruned.
[I 2021-05-10 08:24:43,763] Trial 4484 pruned.
[I 2021-05-10 08:24:44,358] Trial 4485 pruned.
[I 2021-05-10 08:24:44,945] Trial 4486 pruned.
[I 2021-05-10 08:24:45,574] Trial 4487 pruned.
[I 2021-05-10 08:24:46,165] Trial 4488 pruned.
[I 2021-05-10 08:24:46,777] Trial 4489 pruned.
[I 2021-05-10 08:25:38,871] Trial 4490 finished with value: 377.9266052246094 and parameters: {'lr': 0.005678625144660136, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 08:25:39,460] Trial 4491 pruned.
[I 2021-05-10 08:25:40,051] Trial 4492 pruned.
[I 2021-05-10 08:25:40,638] Trial 4493 pruned.
[I 2021-05-10 08:25:41,227] Trial 4494 pruned.
[I 2021-05-10 08:25:41,375] Trial 4495 pruned.
[I 2021-05-10 08:25:41,966] Trial 4496 pruned.
[I 2021-05-10 08:25:42,557] Trial 4497 pruned.
[I 2021-05-10 08:25:42,893] Trial 4498 pruned.
[I 2021-05-10 08:25:43,483] Trial 4499 pruned.
[I 2021-05-10 08:25:44,063] Trial 4500 pruned.
[I 2021-05-10 08:25:44,650] Trial 4501 pruned.
[I 2021-05-10 08:25:45,232] Trial 4502 pruned.
[I 2021-05-10 08:25:45,820] Trial 4503 pruned.
[I 2021-05-10 08:25:46,415] Trial 4504 pruned.
[I 2021-05-10 08:25:47,001] Trial 4505 pruned.
[I 2021-05-10 08:25:47,579] Trial 4506 pruned.
[I 2021-05-10 08:25:48,172] Trial 4507 pruned.
[I 2021-05-10 08:25:48,760] Trial 4508 pruned.
[I 2021-05-10 08:25:49,352] Trial 4509 pruned.
[I 2021-05-10 08:25:49,939] Trial 4510 pruned.
[I 2021-05-10 08:25:50,524] Trial 4511 pruned.
[I 2021-05-10 08:25:51,113] Trial 4512 pruned.
[I 2021-05-10 08:25:51,324] Trial 4513 pruned.
[I 2021-05-10 08:25:51,916] Trial 4514 pruned.
[I 2021-05-10 08:25:52,508] Trial 4515 pruned.
[I 2021-05-10 08:25:53,102] Trial 4516 pruned.
[I 2021-05-10 08:25:53,689] Trial 4517 pruned.
[I 2021-05-10 08:25:54,282] Trial 4518 pruned.
[I 2021-05-10 08:25:54,873] Trial 4519 pruned.
[I 2021-05-10 08:25:55,460] Trial 4520 pruned.
[I 2021-05-10 08:25:56,049] Trial 4521 pruned.
[I 2021-05-10 08:25:56,640] Trial 4522 pruned.
[I 2021-05-10 08:25:57,216] Trial 4523 pruned.
[I 2021-05-10 08:25:57,364] Trial 4524 pruned.
[I 2021-05-10 08:25:57,956] Trial 4525 pruned.
[I 2021-05-10 08:25:58,293] Trial 4526 pruned.
[I 2021-05-10 08:25:58,877] Trial 4527 pruned.
[I 2021-05-10 08:25:59,455] Trial 4528 pruned.
[I 2021-05-10 08:26:00,047] Trial 4529 pruned.
[I 2021-05-10 08:26:00,621] Trial 4530 pruned.
[I 2021-05-10 08:26:01,211] Trial 4531 pruned.
[I 2021-05-10 08:26:01,798] Trial 4532 pruned.
[I 2021-05-10 08:26:02,383] Trial 4533 pruned.
[I 2021-05-10 08:26:02,962] Trial 4534 pruned.
[I 2021-05-10 08:26:04,575] Trial 4535 pruned.
[I 2021-05-10 08:26:05,167] Trial 4536 pruned.
[I 2021-05-10 08:26:05,758] Trial 4537 pruned.
[I 2021-05-10 08:26:06,350] Trial 4538 pruned.
[I 2021-05-10 08:26:06,945] Trial 4539 pruned.
[I 2021-05-10 08:26:07,525] Trial 4540 pruned.
[I 2021-05-10 08:26:07,734] Trial 4541 pruned.
[I 2021-05-10 08:26:08,325] Trial 4542 pruned.
[I 2021-05-10 08:26:08,914] Trial 4543 pruned.
[I 2021-05-10 08:26:09,499] Trial 4544 pruned.
[I 2021-05-10 08:26:10,091] Trial 4545 pruned.
[I 2021-05-10 08:26:10,681] Trial 4546 pruned.
[I 2021-05-10 08:26:11,264] Trial 4547 pruned.
[I 2021-05-10 08:26:11,855] Trial 4548 pruned.
[I 2021-05-10 08:26:12,444] Trial 4549 pruned.
[I 2021-05-10 08:26:13,023] Trial 4550 pruned.
[I 2021-05-10 08:26:13,610] Trial 4551 pruned.
[I 2021-05-10 08:26:14,199] Trial 4552 pruned.
[I 2021-05-10 08:26:14,791] Trial 4553 pruned.
[I 2021-05-10 08:26:14,939] Trial 4554 pruned.
[I 2021-05-10 08:26:15,278] Trial 4555 pruned.
[I 2021-05-10 08:26:15,868] Trial 4556 pruned.
[I 2021-05-10 08:26:16,443] Trial 4557 pruned.
[I 2021-05-10 08:26:17,032] Trial 4558 pruned.
[I 2021-05-10 08:26:17,619] Trial 4559 pruned.
[I 2021-05-10 08:26:18,201] Trial 4560 pruned.
[I 2021-05-10 08:26:18,797] Trial 4561 pruned.
[I 2021-05-10 08:26:19,388] Trial 4562 pruned.
[I 2021-05-10 08:26:19,976] Trial 4563 pruned.
[I 2021-05-10 08:26:20,559] Trial 4564 pruned.
[I 2021-05-10 08:26:21,147] Trial 4565 pruned.
[I 2021-05-10 08:26:21,737] Trial 4566 pruned.
[I 2021-05-10 08:26:22,316] Trial 4567 pruned.
[I 2021-05-10 08:26:22,913] Trial 4568 pruned.
[I 2021-05-10 08:26:23,504] Trial 4569 pruned.
[I 2021-05-10 08:26:24,096] Trial 4570 pruned.
[I 2021-05-10 08:26:24,685] Trial 4571 pruned.
[I 2021-05-10 08:26:25,278] Trial 4572 pruned.
[I 2021-05-10 08:26:25,491] Trial 4573 pruned.
[I 2021-05-10 08:26:26,070] Trial 4574 pruned.
[I 2021-05-10 08:26:26,666] Trial 4575 pruned.
[I 2021-05-10 08:26:27,262] Trial 4576 pruned.
[I 2021-05-10 08:26:27,841] Trial 4577 pruned.
[I 2021-05-10 08:26:28,428] Trial 4578 pruned.
[I 2021-05-10 08:26:29,022] Trial 4579 pruned.
[I 2021-05-10 08:26:29,610] Trial 4580 pruned.
[I 2021-05-10 08:26:30,192] Trial 4581 pruned.
[I 2021-05-10 08:26:30,341] Trial 4582 pruned.
[I 2021-05-10 08:26:30,925] Trial 4583 pruned.
[I 2021-05-10 08:26:31,508] Trial 4584 pruned.
[I 2021-05-10 08:26:32,094] Trial 4585 pruned.
[I 2021-05-10 08:26:32,437] Trial 4586 pruned.
[I 2021-05-10 08:26:33,024] Trial 4587 pruned.
[I 2021-05-10 08:27:19,855] Trial 4588 pruned.
[I 2021-05-10 08:27:20,445] Trial 4589 pruned.
[I 2021-05-10 08:27:21,036] Trial 4590 pruned.
[I 2021-05-10 08:27:21,607] Trial 4591 pruned.
[I 2021-05-10 08:27:22,200] Trial 4592 pruned.
[I 2021-05-10 08:27:22,795] Trial 4593 pruned.
[I 2021-05-10 08:27:23,376] Trial 4594 pruned.
[I 2021-05-10 08:27:23,967] Trial 4595 pruned.
[I 2021-05-10 08:27:24,556] Trial 4596 pruned.
[I 2021-05-10 08:27:25,150] Trial 4597 pruned.
[I 2021-05-10 08:27:25,744] Trial 4598 pruned.
[I 2021-05-10 08:27:26,336] Trial 4599 pruned.
[I 2021-05-10 08:27:26,549] Trial 4600 pruned.
[I 2021-05-10 08:27:27,131] Trial 4601 pruned.
[I 2021-05-10 08:27:27,724] Trial 4602 pruned.
[I 2021-05-10 08:27:28,321] Trial 4603 pruned.
[I 2021-05-10 08:27:28,903] Trial 4604 pruned.
[I 2021-05-10 08:27:29,497] Trial 4605 pruned.
[I 2021-05-10 08:27:30,089] Trial 4606 pruned.
[I 2021-05-10 08:27:30,682] Trial 4607 pruned.
[I 2021-05-10 08:27:31,267] Trial 4608 pruned.
[I 2021-05-10 08:27:31,872] Trial 4609 pruned.
[I 2021-05-10 08:27:32,468] Trial 4610 pruned.
[I 2021-05-10 08:27:32,615] Trial 4611 pruned.
[I 2021-05-10 08:27:33,215] Trial 4612 pruned.
[I 2021-05-10 08:27:33,808] Trial 4613 pruned.
[I 2021-05-10 08:27:34,141] Trial 4614 pruned.
[I 2021-05-10 08:27:34,739] Trial 4615 pruned.
[I 2021-05-10 08:27:35,324] Trial 4616 pruned.
[I 2021-05-10 08:27:35,918] Trial 4617 pruned.
[I 2021-05-10 08:27:36,501] Trial 4618 pruned.
[I 2021-05-10 08:27:37,090] Trial 4619 pruned.
[I 2021-05-10 08:27:37,684] Trial 4620 pruned.
[I 2021-05-10 08:27:38,270] Trial 4621 pruned.
[I 2021-05-10 08:27:38,862] Trial 4622 pruned.
[I 2021-05-10 08:27:39,454] Trial 4623 pruned.
[I 2021-05-10 08:27:40,047] Trial 4624 pruned.
[I 2021-05-10 08:27:40,628] Trial 4625 pruned.
[I 2021-05-10 08:27:41,218] Trial 4626 pruned.
[I 2021-05-10 08:27:41,812] Trial 4627 pruned.
[I 2021-05-10 08:27:42,395] Trial 4628 pruned.
[I 2021-05-10 08:27:42,607] Trial 4629 pruned.
[I 2021-05-10 08:27:43,202] Trial 4630 pruned.
[I 2021-05-10 08:27:43,784] Trial 4631 pruned.
[I 2021-05-10 08:27:44,378] Trial 4632 pruned.
[I 2021-05-10 08:27:44,972] Trial 4633 pruned.
[I 2021-05-10 08:27:45,570] Trial 4634 pruned.
[I 2021-05-10 08:27:46,157] Trial 4635 pruned.
[I 2021-05-10 08:27:46,748] Trial 4636 pruned.
[I 2021-05-10 08:27:47,331] Trial 4637 pruned.
[I 2021-05-10 08:27:47,906] Trial 4638 pruned.
[I 2021-05-10 08:27:48,498] Trial 4639 pruned.
[I 2021-05-10 08:27:48,650] Trial 4640 pruned.
[I 2021-05-10 08:27:49,238] Trial 4641 pruned.
[I 2021-05-10 08:27:49,828] Trial 4642 pruned.
[I 2021-05-10 08:27:50,416] Trial 4643 pruned.
[I 2021-05-10 08:27:51,003] Trial 4644 pruned.
[I 2021-05-10 08:27:51,589] Trial 4645 pruned.
[I 2021-05-10 08:27:51,932] Trial 4646 pruned.
[I 2021-05-10 08:27:52,521] Trial 4647 pruned.
[I 2021-05-10 08:27:53,108] Trial 4648 pruned.
[I 2021-05-10 08:27:53,703] Trial 4649 pruned.
[I 2021-05-10 08:27:54,298] Trial 4650 pruned.
[I 2021-05-10 08:27:54,892] Trial 4651 pruned.
[I 2021-05-10 08:27:55,480] Trial 4652 pruned.
[I 2021-05-10 08:27:56,074] Trial 4653 pruned.
[I 2021-05-10 08:27:56,665] Trial 4654 pruned.
[I 2021-05-10 08:27:57,253] Trial 4655 pruned.
[I 2021-05-10 08:27:57,846] Trial 4656 pruned.
[I 2021-05-10 08:27:58,440] Trial 4657 pruned.
[I 2021-05-10 08:27:58,652] Trial 4658 pruned.
[I 2021-05-10 08:27:59,245] Trial 4659 pruned.
[I 2021-05-10 08:27:59,835] Trial 4660 pruned.
[I 2021-05-10 08:28:00,433] Trial 4661 pruned.
[I 2021-05-10 08:28:01,016] Trial 4662 pruned.
[I 2021-05-10 08:28:01,597] Trial 4663 pruned.
[I 2021-05-10 08:28:02,190] Trial 4664 pruned.
[I 2021-05-10 08:28:02,764] Trial 4665 pruned.
[I 2021-05-10 08:28:03,360] Trial 4666 pruned.
[I 2021-05-10 08:28:03,952] Trial 4667 pruned.
[I 2021-05-10 08:28:04,535] Trial 4668 pruned.
[I 2021-05-10 08:28:04,686] Trial 4669 pruned.
[I 2021-05-10 08:28:05,281] Trial 4670 pruned.
[I 2021-05-10 08:28:05,861] Trial 4671 pruned.
[I 2021-05-10 08:28:06,197] Trial 4672 pruned.
[I 2021-05-10 08:28:06,795] Trial 4673 pruned.
[I 2021-05-10 08:28:07,391] Trial 4674 pruned.
[I 2021-05-10 08:28:07,975] Trial 4675 pruned.
[I 2021-05-10 08:28:08,563] Trial 4676 pruned.
[I 2021-05-10 08:28:09,155] Trial 4677 pruned.
[I 2021-05-10 08:29:00,666] Trial 4678 finished with value: 380.7075500488281 and parameters: {'lr': 0.006046713144823341, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 08:29:01,251] Trial 4679 pruned.
[I 2021-05-10 08:29:01,845] Trial 4680 pruned.
[I 2021-05-10 08:29:02,437] Trial 4681 pruned.
[I 2021-05-10 08:29:03,021] Trial 4682 pruned.
[I 2021-05-10 08:29:03,611] Trial 4683 pruned.
[I 2021-05-10 08:29:04,206] Trial 4684 pruned.
[I 2021-05-10 08:29:04,788] Trial 4685 pruned.
[I 2021-05-10 08:29:05,380] Trial 4686 pruned.
[I 2021-05-10 08:29:05,975] Trial 4687 pruned.
[I 2021-05-10 08:29:06,188] Trial 4688 pruned.
[I 2021-05-10 08:29:06,778] Trial 4689 pruned.
[I 2021-05-10 08:29:07,373] Trial 4690 pruned.
[I 2021-05-10 08:29:07,956] Trial 4691 pruned.
[I 2021-05-10 08:29:08,537] Trial 4692 pruned.
[I 2021-05-10 08:29:09,122] Trial 4693 pruned.
[I 2021-05-10 08:29:09,729] Trial 4694 pruned.
[I 2021-05-10 08:29:10,317] Trial 4695 pruned.
[I 2021-05-10 08:29:10,908] Trial 4696 pruned.
[I 2021-05-10 08:29:11,498] Trial 4697 pruned.
[I 2021-05-10 08:29:11,649] Trial 4698 pruned.
[I 2021-05-10 08:29:12,235] Trial 4699 pruned.
[I 2021-05-10 08:29:12,829] Trial 4700 pruned.
[I 2021-05-10 08:29:13,171] Trial 4701 pruned.
[I 2021-05-10 08:29:13,767] Trial 4702 pruned.
[I 2021-05-10 08:29:14,363] Trial 4703 pruned.
[I 2021-05-10 08:29:14,957] Trial 4704 pruned.
[I 2021-05-10 08:29:15,546] Trial 4705 pruned.
[I 2021-05-10 08:29:16,131] Trial 4706 pruned.
[I 2021-05-10 08:29:16,725] Trial 4707 pruned.
[I 2021-05-10 08:29:17,317] Trial 4708 pruned.
[I 2021-05-10 08:29:17,910] Trial 4709 pruned.
[I 2021-05-10 08:29:18,505] Trial 4710 pruned.
[I 2021-05-10 08:29:19,099] Trial 4711 pruned.
[I 2021-05-10 08:29:19,691] Trial 4712 pruned.
[I 2021-05-10 08:29:20,289] Trial 4713 pruned.
[I 2021-05-10 08:29:20,881] Trial 4714 pruned.
[I 2021-05-10 08:29:21,470] Trial 4715 pruned.
[I 2021-05-10 08:29:22,055] Trial 4716 pruned.
[I 2021-05-10 08:29:22,268] Trial 4717 pruned.
[I 2021-05-10 08:29:22,864] Trial 4718 pruned.
[I 2021-05-10 08:29:23,454] Trial 4719 pruned.
[I 2021-05-10 08:29:24,040] Trial 4720 pruned.
[I 2021-05-10 08:29:24,631] Trial 4721 pruned.
[I 2021-05-10 08:29:25,215] Trial 4722 pruned.
[I 2021-05-10 08:29:25,806] Trial 4723 pruned.
[I 2021-05-10 08:29:26,394] Trial 4724 pruned.
[I 2021-05-10 08:29:26,989] Trial 4725 pruned.
[I 2021-05-10 08:29:27,573] Trial 4726 pruned.
[I 2021-05-10 08:29:27,724] Trial 4727 pruned.
[I 2021-05-10 08:29:28,323] Trial 4728 pruned.
[I 2021-05-10 08:29:28,912] Trial 4729 pruned.
[I 2021-05-10 08:29:29,505] Trial 4730 pruned.
[I 2021-05-10 08:29:29,844] Trial 4731 pruned.
[I 2021-05-10 08:29:30,437] Trial 4732 pruned.
[I 2021-05-10 08:29:31,025] Trial 4733 pruned.
[I 2021-05-10 08:29:31,620] Trial 4734 pruned.
[I 2021-05-10 08:29:32,218] Trial 4735 pruned.
[I 2021-05-10 08:29:32,803] Trial 4736 pruned.
[I 2021-05-10 08:29:33,398] Trial 4737 pruned.
[I 2021-05-10 08:29:33,992] Trial 4738 pruned.
[I 2021-05-10 08:29:34,580] Trial 4739 pruned.
[I 2021-05-10 08:29:35,172] Trial 4740 pruned.
[I 2021-05-10 08:29:35,774] Trial 4741 pruned.
[I 2021-05-10 08:29:36,368] Trial 4742 pruned.
[I 2021-05-10 08:29:36,953] Trial 4743 pruned.
[I 2021-05-10 08:29:37,534] Trial 4744 pruned.
[I 2021-05-10 08:29:37,749] Trial 4745 pruned.
[I 2021-05-10 08:29:38,326] Trial 4746 pruned.
[I 2021-05-10 08:29:38,922] Trial 4747 pruned.
[I 2021-05-10 08:29:39,515] Trial 4748 pruned.
[I 2021-05-10 08:30:30,166] Trial 4749 finished with value: 370.37274169921875 and parameters: {'lr': 0.003736294358729859, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 08:30:30,756] Trial 4750 pruned.
[I 2021-05-10 08:30:31,345] Trial 4751 pruned.
[I 2021-05-10 08:30:31,924] Trial 4752 pruned.
[I 2021-05-10 08:30:32,518] Trial 4753 pruned.
[I 2021-05-10 08:30:33,108] Trial 4754 pruned.
[I 2021-05-10 08:30:33,704] Trial 4755 pruned.
[I 2021-05-10 08:30:34,288] Trial 4756 pruned.
[I 2021-05-10 08:30:34,880] Trial 4757 pruned.
[I 2021-05-10 08:30:35,032] Trial 4758 pruned.
[I 2021-05-10 08:30:35,368] Trial 4759 pruned.
[I 2021-05-10 08:30:35,951] Trial 4760 pruned.
[I 2021-05-10 08:30:36,537] Trial 4761 pruned.
[I 2021-05-10 08:30:37,128] Trial 4762 pruned.
[I 2021-05-10 08:30:37,717] Trial 4763 pruned.
[I 2021-05-10 08:30:38,303] Trial 4764 pruned.
[I 2021-05-10 08:30:38,884] Trial 4765 pruned.
[I 2021-05-10 08:30:39,478] Trial 4766 pruned.
[I 2021-05-10 08:30:40,061] Trial 4767 pruned.
[I 2021-05-10 08:30:40,643] Trial 4768 pruned.
[I 2021-05-10 08:30:41,226] Trial 4769 pruned.
[I 2021-05-10 08:30:41,811] Trial 4770 pruned.
[I 2021-05-10 08:30:42,399] Trial 4771 pruned.
[I 2021-05-10 08:30:42,972] Trial 4772 pruned.
[I 2021-05-10 08:30:43,562] Trial 4773 pruned.
[I 2021-05-10 08:30:44,152] Trial 4774 pruned.
[I 2021-05-10 08:30:44,742] Trial 4775 pruned.
[I 2021-05-10 08:30:44,955] Trial 4776 pruned.
[I 2021-05-10 08:30:45,542] Trial 4777 pruned.
[I 2021-05-10 08:30:46,115] Trial 4778 pruned.
[I 2021-05-10 08:30:46,713] Trial 4779 pruned.
[I 2021-05-10 08:30:47,287] Trial 4780 pruned.
[I 2021-05-10 08:30:47,881] Trial 4781 pruned.
[I 2021-05-10 08:30:48,472] Trial 4782 pruned.
[I 2021-05-10 08:30:49,063] Trial 4783 pruned.
[I 2021-05-10 08:30:49,650] Trial 4784 pruned.
[I 2021-05-10 08:30:50,235] Trial 4785 pruned.
[I 2021-05-10 08:30:50,389] Trial 4786 pruned.
[I 2021-05-10 08:30:50,972] Trial 4787 pruned.
[I 2021-05-10 08:30:51,311] Trial 4788 pruned.
[I 2021-05-10 08:30:51,901] Trial 4789 pruned.
[I 2021-05-10 08:30:52,491] Trial 4790 pruned.
[I 2021-05-10 08:30:53,088] Trial 4791 pruned.
[I 2021-05-10 08:30:53,687] Trial 4792 pruned.
[I 2021-05-10 08:30:54,276] Trial 4793 pruned.
[I 2021-05-10 08:30:54,870] Trial 4794 pruned.
[I 2021-05-10 08:30:55,459] Trial 4795 pruned.
[I 2021-05-10 08:30:56,051] Trial 4796 pruned.
[I 2021-05-10 08:30:56,635] Trial 4797 pruned.
[I 2021-05-10 08:30:57,226] Trial 4798 pruned.
[I 2021-05-10 08:30:57,816] Trial 4799 pruned.
[I 2021-05-10 08:30:58,395] Trial 4800 pruned.
[I 2021-05-10 08:30:58,977] Trial 4801 pruned.
[I 2021-05-10 08:30:59,560] Trial 4802 pruned.
[I 2021-05-10 08:31:04,700] Trial 4803 pruned.
[I 2021-05-10 08:31:05,286] Trial 4804 pruned.
[I 2021-05-10 08:31:05,875] Trial 4805 pruned.
[I 2021-05-10 08:31:06,464] Trial 4806 pruned.
[I 2021-05-10 08:31:06,676] Trial 4807 pruned.
[I 2021-05-10 08:31:07,266] Trial 4808 pruned.
[I 2021-05-10 08:31:07,852] Trial 4809 pruned.
[I 2021-05-10 08:31:08,440] Trial 4810 pruned.
[I 2021-05-10 08:31:09,023] Trial 4811 pruned.
[I 2021-05-10 08:31:09,607] Trial 4812 pruned.
[I 2021-05-10 08:31:10,190] Trial 4813 pruned.
[I 2021-05-10 08:31:10,779] Trial 4814 pruned.
[I 2021-05-10 08:31:10,934] Trial 4815 pruned.
[I 2021-05-10 08:31:11,527] Trial 4816 pruned.
[I 2021-05-10 08:31:12,114] Trial 4817 pruned.
[I 2021-05-10 08:31:12,702] Trial 4818 pruned.
[I 2021-05-10 08:31:13,041] Trial 4819 pruned.
[I 2021-05-10 08:31:13,628] Trial 4820 pruned.
[I 2021-05-10 08:31:14,221] Trial 4821 pruned.
[I 2021-05-10 08:31:14,806] Trial 4822 pruned.
[I 2021-05-10 08:31:15,393] Trial 4823 pruned.
[I 2021-05-10 08:31:15,980] Trial 4824 pruned.
[I 2021-05-10 08:31:16,564] Trial 4825 pruned.
[I 2021-05-10 08:32:05,955] Trial 4826 finished with value: 367.75885009765625 and parameters: {'lr': 0.0038135516175555133, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 08:32:06,531] Trial 4827 pruned.
[I 2021-05-10 08:32:07,130] Trial 4828 pruned.
[I 2021-05-10 08:32:07,709] Trial 4829 pruned.
[I 2021-05-10 08:32:57,250] Trial 4830 finished with value: 373.61663818359375 and parameters: {'lr': 0.002862134022491621, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 08:32:57,830] Trial 4831 pruned.
[I 2021-05-10 08:32:58,420] Trial 4832 pruned.
[I 2021-05-10 08:32:59,011] Trial 4833 pruned.
[I 2021-05-10 08:32:59,220] Trial 4834 pruned.
[I 2021-05-10 08:32:59,800] Trial 4835 pruned.
[I 2021-05-10 08:33:00,380] Trial 4836 pruned.
[I 2021-05-10 08:33:00,961] Trial 4837 pruned.
[I 2021-05-10 08:33:01,540] Trial 4838 pruned.
[I 2021-05-10 08:33:02,119] Trial 4839 pruned.
[I 2021-05-10 08:33:02,713] Trial 4840 pruned.
[I 2021-05-10 08:33:03,295] Trial 4841 pruned.
[I 2021-05-10 08:33:03,877] Trial 4842 pruned.
[I 2021-05-10 08:33:04,454] Trial 4843 pruned.
[I 2021-05-10 08:33:05,034] Trial 4844 pruned.
[I 2021-05-10 08:33:05,611] Trial 4845 pruned.
[I 2021-05-10 08:33:05,764] Trial 4846 pruned.
[I 2021-05-10 08:33:06,344] Trial 4847 pruned.
[I 2021-05-10 08:33:06,677] Trial 4848 pruned.
[I 2021-05-10 08:33:07,255] Trial 4849 pruned.
[I 2021-05-10 08:33:07,835] Trial 4850 pruned.
[I 2021-05-10 08:33:08,412] Trial 4851 pruned.
[I 2021-05-10 08:33:08,991] Trial 4852 pruned.
[I 2021-05-10 08:33:09,566] Trial 4853 pruned.
[I 2021-05-10 08:33:10,139] Trial 4854 pruned.
[I 2021-05-10 08:33:10,719] Trial 4855 pruned.
[I 2021-05-10 08:33:11,290] Trial 4856 pruned.
[I 2021-05-10 08:33:11,863] Trial 4857 pruned.
[I 2021-05-10 08:33:12,437] Trial 4858 pruned.
[I 2021-05-10 08:33:13,013] Trial 4859 pruned.
[I 2021-05-10 08:33:13,590] Trial 4860 pruned.
[I 2021-05-10 08:33:14,161] Trial 4861 pruned.
[I 2021-05-10 08:33:14,755] Trial 4862 pruned.
[I 2021-05-10 08:33:15,341] Trial 4863 pruned.
[I 2021-05-10 08:33:15,555] Trial 4864 pruned.
[I 2021-05-10 08:33:16,135] Trial 4865 pruned.
[I 2021-05-10 08:33:16,717] Trial 4866 pruned.
[I 2021-05-10 08:33:17,293] Trial 4867 pruned.
[I 2021-05-10 08:33:17,871] Trial 4868 pruned.
[I 2021-05-10 08:33:18,458] Trial 4869 pruned.
[I 2021-05-10 08:33:19,033] Trial 4870 pruned.
[I 2021-05-10 08:33:19,608] Trial 4871 pruned.
[I 2021-05-10 08:33:20,188] Trial 4872 pruned.
[I 2021-05-10 08:33:20,770] Trial 4873 pruned.
[I 2021-05-10 08:33:20,922] Trial 4874 pruned.
[I 2021-05-10 08:33:21,497] Trial 4875 pruned.
[I 2021-05-10 08:33:22,073] Trial 4876 pruned.
[I 2021-05-10 08:33:22,406] Trial 4877 pruned.
[I 2021-05-10 08:33:22,985] Trial 4878 pruned.
[I 2021-05-10 08:33:23,563] Trial 4879 pruned.
[I 2021-05-10 08:33:24,143] Trial 4880 pruned.
[I 2021-05-10 08:33:24,741] Trial 4881 pruned.
[I 2021-05-10 08:33:25,320] Trial 4882 pruned.
[I 2021-05-10 08:33:25,913] Trial 4883 pruned.
[I 2021-05-10 08:33:26,497] Trial 4884 pruned.
[I 2021-05-10 08:33:27,074] Trial 4885 pruned.
[I 2021-05-10 08:33:27,658] Trial 4886 pruned.
[I 2021-05-10 08:33:28,245] Trial 4887 pruned.
[I 2021-05-10 08:33:28,807] Trial 4888 pruned.
[I 2021-05-10 08:33:29,395] Trial 4889 pruned.
[I 2021-05-10 08:33:29,991] Trial 4890 pruned.
[I 2021-05-10 08:33:30,565] Trial 4891 pruned.
[I 2021-05-10 08:33:31,152] Trial 4892 pruned.
[I 2021-05-10 08:33:31,751] Trial 4893 pruned.
[I 2021-05-10 08:33:31,971] Trial 4894 pruned.
[I 2021-05-10 08:33:32,561] Trial 4895 pruned.
[I 2021-05-10 08:33:33,149] Trial 4896 pruned.
[I 2021-05-10 08:33:33,730] Trial 4897 pruned.
[I 2021-05-10 08:33:34,318] Trial 4898 pruned.
[I 2021-05-10 08:33:34,911] Trial 4899 pruned.
[I 2021-05-10 08:33:35,491] Trial 4900 pruned.
[I 2021-05-10 08:33:36,082] Trial 4901 pruned.
[I 2021-05-10 08:33:36,674] Trial 4902 pruned.
[I 2021-05-10 08:33:37,250] Trial 4903 pruned.
[I 2021-05-10 08:33:37,404] Trial 4904 pruned.
[I 2021-05-10 08:33:37,995] Trial 4905 pruned.
[I 2021-05-10 08:33:38,571] Trial 4906 pruned.
[I 2021-05-10 08:33:39,157] Trial 4907 pruned.
[I 2021-05-10 08:33:39,764] Trial 4908 pruned.
[I 2021-05-10 08:33:40,101] Trial 4909 pruned.
[I 2021-05-10 08:33:40,691] Trial 4910 pruned.
[I 2021-05-10 08:33:41,277] Trial 4911 pruned.
[I 2021-05-10 08:33:41,852] Trial 4912 pruned.
[I 2021-05-10 08:33:42,430] Trial 4913 pruned.
[I 2021-05-10 08:33:43,014] Trial 4914 pruned.
[I 2021-05-10 08:33:43,597] Trial 4915 pruned.
[I 2021-05-10 08:33:44,188] Trial 4916 pruned.
[I 2021-05-10 08:33:44,769] Trial 4917 pruned.
[I 2021-05-10 08:33:45,348] Trial 4918 pruned.
[I 2021-05-10 08:33:45,929] Trial 4919 pruned.
[I 2021-05-10 08:33:46,513] Trial 4920 pruned.
[I 2021-05-10 08:33:47,089] Trial 4921 pruned.
[I 2021-05-10 08:33:47,681] Trial 4922 pruned.
[I 2021-05-10 08:33:48,279] Trial 4923 pruned.
[I 2021-05-10 08:33:48,868] Trial 4924 pruned.
[I 2021-05-10 08:33:49,457] Trial 4925 pruned.
[I 2021-05-10 08:33:49,672] Trial 4926 pruned.
[I 2021-05-10 08:33:50,252] Trial 4927 pruned.
[I 2021-05-10 08:33:50,840] Trial 4928 pruned.
[I 2021-05-10 08:33:51,425] Trial 4929 pruned.
[I 2021-05-10 08:33:52,001] Trial 4930 pruned.
[I 2021-05-10 08:33:52,599] Trial 4931 pruned.
[I 2021-05-10 08:33:52,754] Trial 4932 pruned.
[I 2021-05-10 08:33:53,337] Trial 4933 pruned.
[I 2021-05-10 08:33:53,927] Trial 4934 pruned.
[I 2021-05-10 08:33:54,280] Trial 4935 pruned.
[I 2021-05-10 08:33:54,864] Trial 4936 pruned.
[I 2021-05-10 08:33:55,450] Trial 4937 pruned.
[I 2021-05-10 08:33:56,036] Trial 4938 pruned.
[I 2021-05-10 08:33:56,601] Trial 4939 pruned.
[I 2021-05-10 08:33:57,197] Trial 4940 pruned.
[I 2021-05-10 08:33:57,794] Trial 4941 pruned.
[I 2021-05-10 08:33:58,375] Trial 4942 pruned.
[I 2021-05-10 08:33:58,973] Trial 4943 pruned.
[I 2021-05-10 08:33:59,568] Trial 4944 pruned.
[I 2021-05-10 08:34:00,162] Trial 4945 pruned.
[I 2021-05-10 08:34:00,762] Trial 4946 pruned.
[I 2021-05-10 08:34:01,364] Trial 4947 pruned.
[I 2021-05-10 08:34:01,950] Trial 4948 pruned.
[I 2021-05-10 08:34:02,535] Trial 4949 pruned.
[I 2021-05-10 08:34:03,134] Trial 4950 pruned.
[I 2021-05-10 08:34:03,728] Trial 4951 pruned.
[I 2021-05-10 08:34:03,945] Trial 4952 pruned.
[I 2021-05-10 08:34:04,549] Trial 4953 pruned.
[I 2021-05-10 08:34:05,136] Trial 4954 pruned.
[I 2021-05-10 08:34:56,653] Trial 4955 finished with value: 373.3895263671875 and parameters: {'lr': 0.003506418494032245, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 08:34:57,251] Trial 4956 pruned.
[I 2021-05-10 08:34:57,844] Trial 4957 pruned.
[I 2021-05-10 08:34:58,443] Trial 4958 pruned.
[I 2021-05-10 08:34:59,046] Trial 4959 pruned.
[I 2021-05-10 08:35:00,648] Trial 4960 pruned.
[I 2021-05-10 08:35:00,803] Trial 4961 pruned.
[I 2021-05-10 08:35:01,400] Trial 4962 pruned.
[I 2021-05-10 08:35:01,985] Trial 4963 pruned.
[I 2021-05-10 08:35:02,334] Trial 4964 pruned.
[I 2021-05-10 08:35:02,919] Trial 4965 pruned.
[I 2021-05-10 08:35:03,503] Trial 4966 pruned.
[I 2021-05-10 08:35:04,103] Trial 4967 pruned.
[I 2021-05-10 08:35:04,708] Trial 4968 pruned.
[I 2021-05-10 08:35:05,290] Trial 4969 pruned.
[I 2021-05-10 08:35:05,892] Trial 4970 pruned.
[I 2021-05-10 08:35:06,496] Trial 4971 pruned.
[I 2021-05-10 08:35:07,083] Trial 4972 pruned.
[I 2021-05-10 08:35:07,689] Trial 4973 pruned.
[I 2021-05-10 08:35:08,292] Trial 4974 pruned.
[I 2021-05-10 08:35:08,881] Trial 4975 pruned.
[I 2021-05-10 08:35:09,469] Trial 4976 pruned.
[I 2021-05-10 08:35:10,069] Trial 4977 pruned.
[I 2021-05-10 08:35:10,657] Trial 4978 pruned.
[I 2021-05-10 08:35:10,876] Trial 4979 pruned.
[I 2021-05-10 08:35:11,479] Trial 4980 pruned.
[I 2021-05-10 08:35:12,066] Trial 4981 pruned.
[I 2021-05-10 08:35:12,664] Trial 4982 pruned.
[I 2021-05-10 08:35:13,263] Trial 4983 pruned.
[I 2021-05-10 08:35:13,858] Trial 4984 pruned.
[I 2021-05-10 08:35:14,460] Trial 4985 pruned.
[I 2021-05-10 08:35:15,060] Trial 4986 pruned.
[I 2021-05-10 08:36:05,729] Trial 4987 finished with value: 372.5562438964844 and parameters: {'lr': 0.004557440778559366, 'batch_size': 16, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 72 with value: 362.7355651855469.
[I 2021-05-10 08:36:06,324] Trial 4988 pruned.
[I 2021-05-10 08:36:06,922] Trial 4989 pruned.
[I 2021-05-10 08:36:07,501] Trial 4990 pruned.
[I 2021-05-10 08:36:07,656] Trial 4991 pruned.
[I 2021-05-10 08:36:08,254] Trial 4992 pruned.
[I 2021-05-10 08:36:08,594] Trial 4993 pruned.
[I 2021-05-10 08:36:09,194] Trial 4994 pruned.
[I 2021-05-10 08:36:09,795] Trial 4995 pruned.
[I 2021-05-10 08:36:10,384] Trial 4996 pruned.
[I 2021-05-10 08:36:10,970] Trial 4997 pruned.
[I 2021-05-10 08:36:11,571] Trial 4998 pruned.
[I 2021-05-10 08:36:12,158] Trial 4999 pruned.
Wall time: 2h 20min 31s
# Export every trial's record (params, objective value, state, timings) as a DataFrame.
trials_df = study.trials_dataframe()
# Display the DataFrame (notebook cell output).
trials_df
| number | value | datetime_start | datetime_complete | duration | params_HL0_ac_fn | params_HL1_ac_fn | params_HL2_ac_fn | params_HL3_ac_fn | params_batch_size | params_lr | state | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0 | 381.408112 | 2021-05-10 06:15:40.601440 | 2021-05-10 06:16:33.487259 | 0 days 00:00:52.885819 | relu | relu | relu | relu | 16 | 0.005612 | COMPLETE |
| 1 | 1 | 388.070770 | 2021-05-10 06:16:33.489253 | 2021-05-10 06:16:39.933604 | 0 days 00:00:06.444351 | relu | relu | linear | linear | 128 | 0.002659 | COMPLETE |
| 2 | 2 | 394.286255 | 2021-05-10 06:16:39.935599 | 2021-05-10 06:16:46.369486 | 0 days 00:00:06.433887 | relu | linear | relu | linear | 128 | 0.002508 | COMPLETE |
| 3 | 3 | 378.561462 | 2021-05-10 06:16:46.370483 | 2021-05-10 06:16:52.835316 | 0 days 00:00:06.464833 | linear | linear | relu | relu | 128 | 0.007591 | COMPLETE |
| 4 | 4 | 643.163696 | 2021-05-10 06:16:52.837312 | 2021-05-10 06:17:05.423851 | 0 days 00:00:12.586539 | relu | linear | linear | relu | 64 | 0.075683 | COMPLETE |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 4995 | 4995 | 546.698364 | 2021-05-10 08:36:09.196851 | 2021-05-10 08:36:09.795252 | 0 days 00:00:00.598401 | relu | relu | relu | relu | 16 | 0.002724 | PRUNED |
| 4996 | 4996 | 405.303802 | 2021-05-10 08:36:09.796249 | 2021-05-10 08:36:10.384676 | 0 days 00:00:00.588427 | relu | relu | relu | linear | 16 | 0.005026 | PRUNED |
| 4997 | 4997 | 444.182587 | 2021-05-10 08:36:10.386671 | 2021-05-10 08:36:10.970111 | 0 days 00:00:00.583440 | relu | linear | relu | relu | 16 | 0.003869 | PRUNED |
| 4998 | 4998 | 483.671448 | 2021-05-10 08:36:10.972106 | 2021-05-10 08:36:11.571504 | 0 days 00:00:00.599398 | relu | relu | relu | relu | 16 | 0.005995 | PRUNED |
| 4999 | 4999 | 495.920013 | 2021-05-10 08:36:11.573499 | 2021-05-10 08:36:12.158934 | 0 days 00:00:00.585435 | relu | relu | relu | linear | 16 | 0.003346 | PRUNED |
5000 rows × 12 columns
# Persist the trials DataFrame for later offline analysis.
trials_df.to_pickle('dnn_trials/' + study_name + '_df.pkl')
# save the study for resuming later:
joblib.dump(study, "dnn_trials/" + study_name + '_study.pkl')
['dnn_trials/dnn_45_var_100_nrn_1st_layer_study.pkl']
# Parallel-coordinate plot: each line is one trial, axes are the hyper-parameters
# and the objective value — useful for spotting which settings cluster near the best.
graph = optuna.visualization.plot_parallel_coordinate(study)
# NOTE(review): static image export needs kaleido/orca installed — confirm environment.
graph.write_image("dnn_trials/" + study_name + '_plot.jpeg')
graph.show()
# Objective value of each trial over the course of the search (convergence view).
optuna.visualization.plot_optimization_history(study)
# Per-step intermediate values reported by trials; visualizes where pruning kicked in.
optuna.visualization.plot_intermediate_values(study)
# Reload a previously saved study from disk and report its current best trial.
study = joblib.load("dnn_trials/dnn_all_45_variables_study.pkl")
best = study.best_trial
print("Best trial until now:")
print(" Value: ", best.value)
print(" Params: ")
for name, setting in best.params.items():
    print(f" {name}: {setting}")
Best trial until now:
Value: 139.8904571533203
Params:
n_layers: 3
neurons_HL0: 316
neurons_HL1: 367
neurons_HL2: 624
%%time
# Seed every RNG source (torch CPU, torch CUDA, numpy, stdlib random) so the
# re-trained model is reproducible across runs.
torch.manual_seed(42)
torch.cuda.manual_seed(42)
np.random.seed(42)
random.seed(42)
# Global metric accumulators, mutated by tuned_model_investigation:
# *_train lists collect per-batch training metrics, *_scores lists collect
# per-epoch test metrics.
mape_train = []
mape_scores = []
rmse_train = []
rmse_scores = []
mae_train = []
mae_scores = []
r2_train = []
r2_scores = []
# Number of full passes over the training data.
n_epochs = 1000
# src: https://stackoverflow.com/questions/45113245/how-to-get-mini-batches-in-pytorch-in-a-clean-and-efficient-way
# src: https://stackoverflow.com/questions/45113245/how-to-get-mini-batches-in-pytorch-in-a-clean-and-efficient-way
def tuned_model_investigation(trial):
    """Re-train the network described by ``trial`` and record a full metric suite.

    Unlike the search-time objective, this function tracks MAE, MAPE, RMSE and
    R2 for every training batch (``*_train`` globals) and for the test set at
    every epoch (``*_scores`` globals), and pickles the *trained* model under
    ``dnn_trials/`` for later reuse.

    Parameters
    ----------
    trial : optuna.trial.FrozenTrial
        Typically ``study.best_trial``. ``suggest_*`` calls on a frozen trial
        simply return the parameter values stored during the search.

    Returns
    -------
    tuple of list
        ``(mae_scores, mape_scores, rmse_scores, r2_scores)`` — the per-epoch
        test metrics (the same global lists, mutated in place).

    NOTE(review): depends on notebook globals ``X_train``, ``Y_train``,
    ``X_test``, ``Y_test``, ``n_epochs``, ``study_name`` and the metric
    accumulator lists defined in the preceding cell.
    """
    # Echo the hyper-parameters being investigated.
    for key, value in trial.params.items():
        print(f" {key}: {value}")
    # set up GPU if available.
    device = "cpu"
    if torch.cuda.is_available():
        device = "cuda:0"
    # get learning_rate using optuna:
    lr = trial.suggest_loguniform('lr', 1e-3, 1e-1)
    # get batch_size using optuna:
    batch_size = trial.suggest_categorical("batch_size", [16, 32, 64, 128])
    # Fixed funnel architecture 45 -> 100 -> 50 -> 25 -> 12 -> 1; only each
    # hidden layer's activation was tuned ('linear' == identity, i.e. no
    # activation module appended).
    layer_sizes = [45, 100, 50, 25, 12]
    layers = []
    for i, (n_in, n_out) in enumerate(zip(layer_sizes[:-1], layer_sizes[1:])):
        layers.append(torch.nn.Linear(n_in, n_out))
        activation = trial.suggest_categorical(f"HL{i}_ac_fn", ["relu", "linear"])
        if activation == 'relu':
            layers.append(torch.nn.ReLU())
    # Regression head; a 'linear' activation here is the same as no activation.
    layers.append(torch.nn.Linear(layer_sizes[-1], 1))
    dnn_model = torch.nn.Sequential(*layers).to(device)
    print(dnn_model)
    # use MAE as loss function (called L1Loss).
    loss_fn = nn.L1Loss()
    optimizer = optim.Adam(dnn_model.parameters(), lr=lr)
    for epoch in range(n_epochs):
        # Train: iterate mini-batches in a fresh random order each epoch.
        permutation = torch.randperm(X_train.size()[0])
        for i in range(0, X_train.size()[0], batch_size):
            indices = permutation[i:i+batch_size]
            X_train_batch, Y_train_batch = X_train[indices], Y_train[indices]
            # Forward pass on this batch.
            train_prediction = dnn_model(X_train_batch.to(device))
            train_loss = loss_fn(train_prediction, Y_train_batch.to(device))
            train_mape = MAPE_pytorch(Y_train_batch.to(
                'cpu'), train_prediction.to('cpu'))
            train_rmse = RMSE_pytorch(Y_train_batch.to(
                'cpu'), train_prediction.to('cpu'))
            train_r2 = R2_pytorch(Y_train_batch.to(
                'cpu'), train_prediction.to('cpu'))
            optimizer.zero_grad()
            # backpropagation
            train_loss.backward()
            optimizer.step()
            mae_train.append(train_loss.item())
            mape_train.append(train_mape.item())
            rmse_train.append(train_rmse.item())
            r2_train.append(train_r2.item())
        # Evaluate on the held-out set; no_grad avoids building an autograd
        # graph for the evaluation pass (saves memory over 1000 epochs).
        with torch.no_grad():
            test_prediction = dnn_model(X_test.to(device))
            test_loss = loss_fn(test_prediction, Y_test.to(device))
            test_mape = MAPE_pytorch(Y_test.to('cpu'), test_prediction.to('cpu'))
            test_rmse = RMSE_pytorch(Y_test.to('cpu'), test_prediction.to('cpu'))
            test_r2 = R2_pytorch(Y_test.to('cpu'), test_prediction.to('cpu'))
        mae_scores.append(test_loss.item())
        mape_scores.append(test_mape.item())
        rmse_scores.append(test_rmse.item())
        r2_scores.append(test_r2.item())
        print(f"Epoch{epoch+1}\ttrain_loss={train_loss};\ttest_loss={test_loss}")
    # Save the model AFTER training. (Bug fix: the original pickled it before
    # the training loop, so the saved file held untrained weights.)
    with open('dnn_trials/' + study_name + '_trial{}.pickle'.format(trial.number), 'wb') as fout:
        pickle.dump(dnn_model, fout)
    # Return the per-epoch test metric lists for downstream reporting.
    return mae_scores, mape_scores, rmse_scores, r2_scores
# Run the investigation on the best trial and summarise the accumulated metrics.
mae_scores, mape_scores, rmse_scores, r2_scores = tuned_model_investigation(
    study.best_trial)


def _summary_line(metric, values, scale=1):
    """Format one 'Overall <metric>: mean +/- std' line (scale=100 turns R2 into %)."""
    return f"Overall {metric}: {np.mean(values)*scale} +/- {np.std(values)*scale}"


print("\n\n------------------------------------------- TRAINING SCORES -------------------------------------------")
print(_summary_line("MAE", mae_train))
print(_summary_line("RMSE", rmse_train))
print(_summary_line("MAPE", mape_train))
print(_summary_line("R2", r2_train, scale=100))
print("\n\n------------------------------------------- TESTING SCORES -------------------------------------------")
print(_summary_line("MAE", mae_scores))
print(_summary_line("RMSE", rmse_scores))
print(_summary_line("MAPE", mape_scores))
print(_summary_line("R2", r2_scores, scale=100) + "\n\n")
lr: 0.006250169080446751
batch_size: 16
HL0_ac_fn: relu
HL1_ac_fn: relu
HL2_ac_fn: relu
HL3_ac_fn: relu
Sequential(
(0): Linear(in_features=45, out_features=100, bias=True)
(1): ReLU()
(2): Linear(in_features=100, out_features=50, bias=True)
(3): ReLU()
(4): Linear(in_features=50, out_features=25, bias=True)
(5): ReLU()
(6): Linear(in_features=25, out_features=12, bias=True)
(7): ReLU()
(8): Linear(in_features=12, out_features=1, bias=True)
)
Epoch1 train_loss=600.757568359375; test_loss=424.01434326171875
Epoch2 train_loss=86.76095581054688; test_loss=402.0306396484375
Epoch3 train_loss=518.6043090820312; test_loss=399.5040588378906
Epoch4 train_loss=480.8683166503906; test_loss=401.7657470703125
Epoch5 train_loss=442.3416442871094; test_loss=391.0260314941406
Epoch6 train_loss=614.6720581054688; test_loss=394.6177673339844
Epoch7 train_loss=617.058349609375; test_loss=388.6061096191406
Epoch8 train_loss=73.84080505371094; test_loss=401.186767578125
Epoch9 train_loss=344.40380859375; test_loss=387.6961975097656
Epoch10 train_loss=472.1603088378906; test_loss=409.3566589355469
Epoch11 train_loss=633.4678344726562; test_loss=412.60833740234375
Epoch12 train_loss=105.6498031616211; test_loss=387.5329895019531
Epoch13 train_loss=197.22259521484375; test_loss=387.36346435546875
Epoch14 train_loss=270.9827575683594; test_loss=391.2177734375
Epoch15 train_loss=130.91416931152344; test_loss=389.3243103027344
Epoch16 train_loss=276.9669189453125; test_loss=412.1168518066406
Epoch17 train_loss=206.90411376953125; test_loss=395.5287170410156
Epoch18 train_loss=460.0657958984375; test_loss=413.5340881347656
Epoch19 train_loss=691.27685546875; test_loss=392.21722412109375
Epoch20 train_loss=291.2021484375; test_loss=398.6605529785156
Epoch21 train_loss=549.2440185546875; test_loss=453.40283203125
Epoch22 train_loss=143.21725463867188; test_loss=394.7188415527344
Epoch23 train_loss=763.0633544921875; test_loss=402.40618896484375
Epoch24 train_loss=546.8080444335938; test_loss=423.1016540527344
Epoch25 train_loss=433.8251953125; test_loss=411.0635681152344
Epoch26 train_loss=269.7571105957031; test_loss=398.3175354003906
Epoch27 train_loss=450.9269714355469; test_loss=460.4606018066406
Epoch28 train_loss=404.0212707519531; test_loss=388.2127990722656
Epoch29 train_loss=220.0858154296875; test_loss=384.6281433105469
Epoch30 train_loss=303.67401123046875; test_loss=393.1850891113281
Epoch31 train_loss=179.7127685546875; test_loss=384.0798645019531
Epoch32 train_loss=269.4942626953125; test_loss=389.2945251464844
Epoch33 train_loss=349.89447021484375; test_loss=446.9591979980469
Epoch34 train_loss=371.9017333984375; test_loss=420.1323547363281
Epoch35 train_loss=272.2851867675781; test_loss=389.2330627441406
Epoch36 train_loss=496.1641845703125; test_loss=407.1203308105469
Epoch37 train_loss=386.10113525390625; test_loss=427.8958435058594
Epoch38 train_loss=656.670166015625; test_loss=399.84063720703125
Epoch39 train_loss=401.7801513671875; test_loss=386.9415283203125
Epoch40 train_loss=267.43389892578125; test_loss=392.9068908691406
Epoch41 train_loss=389.2626953125; test_loss=382.3907165527344
Epoch42 train_loss=272.23651123046875; test_loss=413.6089782714844
Epoch43 train_loss=388.0451965332031; test_loss=394.7062072753906
Epoch44 train_loss=222.8367919921875; test_loss=405.3193054199219
Epoch45 train_loss=602.287353515625; test_loss=419.7044982910156
Epoch46 train_loss=219.99478149414062; test_loss=402.5643615722656
Epoch47 train_loss=387.541259765625; test_loss=388.80059814453125
Epoch48 train_loss=508.1908874511719; test_loss=412.5461120605469
Epoch49 train_loss=490.960693359375; test_loss=403.58758544921875
Epoch50 train_loss=291.7484130859375; test_loss=416.2393493652344
Epoch51 train_loss=196.42282104492188; test_loss=443.0955505371094
Epoch52 train_loss=266.6667175292969; test_loss=405.6910095214844
Epoch53 train_loss=529.3663940429688; test_loss=385.5306091308594
Epoch54 train_loss=223.473876953125; test_loss=444.9707336425781
Epoch55 train_loss=458.1748046875; test_loss=419.4282531738281
Epoch56 train_loss=536.5149536132812; test_loss=379.5625305175781
Epoch57 train_loss=181.40402221679688; test_loss=396.011474609375
Epoch58 train_loss=286.0794677734375; test_loss=382.16845703125
Epoch59 train_loss=393.6590576171875; test_loss=381.0058288574219
Epoch60 train_loss=663.5880126953125; test_loss=397.36138916015625
Epoch61 train_loss=562.8963623046875; test_loss=388.8777770996094
Epoch62 train_loss=386.55120849609375; test_loss=436.8990173339844
Epoch63 train_loss=272.4879455566406; test_loss=414.3258361816406
Epoch64 train_loss=465.2567138671875; test_loss=393.906494140625
Epoch65 train_loss=500.8145751953125; test_loss=409.9725036621094
Epoch66 train_loss=259.1488037109375; test_loss=382.7648010253906
Epoch67 train_loss=314.5995788574219; test_loss=430.9180603027344
Epoch68 train_loss=324.4909362792969; test_loss=388.65362548828125
Epoch69 train_loss=486.7477111816406; test_loss=379.82708740234375
Epoch70 train_loss=40.85663604736328; test_loss=379.4337158203125
Epoch71 train_loss=783.5013427734375; test_loss=392.2137145996094
Epoch72 train_loss=241.3794708251953; test_loss=386.9978332519531
Epoch73 train_loss=310.8658447265625; test_loss=383.7156066894531
Epoch74 train_loss=416.3543701171875; test_loss=372.18878173828125
Epoch75 train_loss=438.05621337890625; test_loss=373.4554138183594
Epoch76 train_loss=235.51947021484375; test_loss=492.57513427734375
Epoch77 train_loss=196.46412658691406; test_loss=466.65045166015625
Epoch78 train_loss=487.30755615234375; test_loss=446.510009765625
Epoch79 train_loss=229.52767944335938; test_loss=399.0201416015625
Epoch80 train_loss=618.3919677734375; test_loss=384.3115234375
Epoch81 train_loss=229.56735229492188; test_loss=388.4666748046875
Epoch82 train_loss=578.6114501953125; test_loss=397.4969482421875
Epoch83 train_loss=583.963623046875; test_loss=422.08453369140625
Epoch84 train_loss=309.7462158203125; test_loss=381.8316345214844
Epoch85 train_loss=428.44183349609375; test_loss=376.560791015625
Epoch86 train_loss=732.9752197265625; test_loss=385.5968017578125
Epoch87 train_loss=481.97686767578125; test_loss=374.8504638671875
Epoch88 train_loss=37.682334899902344; test_loss=378.7677917480469
Epoch89 train_loss=287.1799621582031; test_loss=372.91448974609375
Epoch90 train_loss=399.40130615234375; test_loss=384.05181884765625
Epoch91 train_loss=76.78559112548828; test_loss=396.17041015625
Epoch92 train_loss=538.1116943359375; test_loss=379.2418518066406
Epoch93 train_loss=498.2303771972656; test_loss=372.13671875
Epoch94 train_loss=367.85894775390625; test_loss=389.1632385253906
Epoch95 train_loss=223.9518280029297; test_loss=368.65216064453125
Epoch96 train_loss=572.2291870117188; test_loss=378.1423034667969
Epoch97 train_loss=308.19488525390625; test_loss=371.0794372558594
Epoch98 train_loss=557.759765625; test_loss=368.38690185546875
Epoch99 train_loss=361.16656494140625; test_loss=383.0950012207031
Epoch100 train_loss=228.05474853515625; test_loss=451.7761535644531
Epoch101 train_loss=417.9295349121094; test_loss=375.8046875
Epoch102 train_loss=716.304443359375; test_loss=494.6686096191406
Epoch103 train_loss=371.77801513671875; test_loss=481.41082763671875
Epoch104 train_loss=252.31658935546875; test_loss=446.7171325683594
Epoch105 train_loss=428.8770751953125; test_loss=446.5806579589844
Epoch106 train_loss=339.6385498046875; test_loss=443.4878845214844
Epoch107 train_loss=417.89739990234375; test_loss=443.1387023925781
Epoch108 train_loss=465.42547607421875; test_loss=449.28369140625
Epoch109 train_loss=398.39056396484375; test_loss=448.6145324707031
Epoch110 train_loss=382.3852233886719; test_loss=460.1257019042969
Epoch111 train_loss=687.4315795898438; test_loss=468.0865173339844
Epoch112 train_loss=312.20782470703125; test_loss=451.032958984375
Epoch113 train_loss=382.9117736816406; test_loss=442.89056396484375
Epoch114 train_loss=387.4623718261719; test_loss=462.90399169921875
Epoch115 train_loss=376.9652099609375; test_loss=425.4693603515625
Epoch116 train_loss=258.8744812011719; test_loss=422.6286926269531
Epoch117 train_loss=245.15628051757812; test_loss=435.07501220703125
Epoch118 train_loss=452.24658203125; test_loss=431.8138732910156
Epoch119 train_loss=640.3556518554688; test_loss=433.1429748535156
Epoch120 train_loss=338.5849914550781; test_loss=427.917724609375
Epoch121 train_loss=489.6611633300781; test_loss=435.1104736328125
Epoch122 train_loss=904.6780395507812; test_loss=427.79962158203125
Epoch123 train_loss=525.0635986328125; test_loss=422.9634704589844
Epoch124 train_loss=454.5409851074219; test_loss=429.6218566894531
Epoch125 train_loss=173.93934631347656; test_loss=429.93536376953125
Epoch126 train_loss=192.3108673095703; test_loss=430.0594787597656
Epoch127 train_loss=614.7636108398438; test_loss=432.11785888671875
Epoch128 train_loss=153.65464782714844; test_loss=416.5779724121094
Epoch129 train_loss=430.6405029296875; test_loss=425.41375732421875
Epoch130 train_loss=778.3138427734375; test_loss=411.68023681640625
Epoch131 train_loss=840.8895263671875; test_loss=440.42919921875
Epoch132 train_loss=192.0175323486328; test_loss=428.5643005371094
Epoch133 train_loss=388.4551696777344; test_loss=466.9436950683594
Epoch134 train_loss=296.5500793457031; test_loss=415.9062805175781
Epoch135 train_loss=246.00924682617188; test_loss=414.2784729003906
Epoch136 train_loss=329.608642578125; test_loss=420.3449401855469
Epoch137 train_loss=721.7691040039062; test_loss=433.4685974121094
Epoch138 train_loss=150.048095703125; test_loss=410.3773193359375
Epoch139 train_loss=369.51690673828125; test_loss=422.6707763671875
Epoch140 train_loss=247.3246612548828; test_loss=410.9170837402344
Epoch141 train_loss=751.95068359375; test_loss=409.99444580078125
Epoch142 train_loss=297.3209228515625; test_loss=403.70892333984375
Epoch143 train_loss=270.23797607421875; test_loss=435.7983703613281
Epoch144 train_loss=255.57583618164062; test_loss=405.6451416015625
Epoch145 train_loss=295.8756103515625; test_loss=407.10040283203125
Epoch146 train_loss=850.5513916015625; test_loss=406.8967590332031
Epoch147 train_loss=377.3826904296875; test_loss=408.0463562011719
Epoch148 train_loss=229.34930419921875; test_loss=402.9857482910156
Epoch149 train_loss=178.49313354492188; test_loss=407.57952880859375
Epoch150 train_loss=819.8033447265625; test_loss=414.7149658203125
Epoch151 train_loss=231.99522399902344; test_loss=403.5608215332031
Epoch152 train_loss=125.25372314453125; test_loss=412.9594421386719
Epoch153 train_loss=247.91111755371094; test_loss=410.24267578125
Epoch154 train_loss=646.7965087890625; test_loss=409.93743896484375
Epoch155 train_loss=149.60287475585938; test_loss=414.5819396972656
Epoch156 train_loss=275.7962951660156; test_loss=414.2747802734375
Epoch157 train_loss=158.24034118652344; test_loss=388.0058288574219
Epoch158 train_loss=195.47036743164062; test_loss=395.4313049316406
Epoch159 train_loss=400.5770568847656; test_loss=417.751953125
Epoch160 train_loss=729.0758666992188; test_loss=416.59356689453125
Epoch161 train_loss=433.7228088378906; test_loss=465.9109802246094
Epoch162 train_loss=467.833740234375; test_loss=377.77960205078125
Epoch163 train_loss=252.71817016601562; test_loss=405.6697692871094
Epoch164 train_loss=578.115478515625; test_loss=380.76239013671875
Epoch165 train_loss=365.8515319824219; test_loss=401.3714904785156
Epoch166 train_loss=150.09707641601562; test_loss=426.1661071777344
Epoch167 train_loss=278.25067138671875; test_loss=374.36798095703125
Epoch168 train_loss=718.6796264648438; test_loss=391.2330627441406
Epoch169 train_loss=210.63565063476562; test_loss=394.0208435058594
Epoch170 train_loss=543.4459228515625; test_loss=403.33740234375
Epoch171 train_loss=580.4092407226562; test_loss=446.6399841308594
Epoch172 train_loss=484.29010009765625; test_loss=413.4519348144531
Epoch173 train_loss=323.4953918457031; test_loss=393.1669006347656
Epoch174 train_loss=265.3792724609375; test_loss=388.4755554199219
Epoch175 train_loss=270.2159118652344; test_loss=401.6399841308594
Epoch176 train_loss=238.7205810546875; test_loss=365.9750671386719
Epoch177 train_loss=179.62966918945312; test_loss=391.08740234375
Epoch178 train_loss=334.75604248046875; test_loss=392.58209228515625
Epoch179 train_loss=159.04437255859375; test_loss=400.267578125
Epoch180 train_loss=1038.08984375; test_loss=379.1469421386719
Epoch181 train_loss=668.956298828125; test_loss=392.3106384277344
Epoch182 train_loss=386.9512023925781; test_loss=450.69573974609375
Epoch183 train_loss=654.6539306640625; test_loss=394.91790771484375
Epoch184 train_loss=360.7735595703125; test_loss=373.34356689453125
Epoch185 train_loss=596.81884765625; test_loss=373.05560302734375
Epoch186 train_loss=606.0843505859375; test_loss=400.9853820800781
Epoch187 train_loss=417.50677490234375; test_loss=381.7913818359375
Epoch188 train_loss=765.2748413085938; test_loss=384.2928161621094
Epoch189 train_loss=143.5946044921875; test_loss=365.3558349609375
Epoch190 train_loss=514.6268920898438; test_loss=414.1216735839844
Epoch191 train_loss=386.50433349609375; test_loss=404.43804931640625
Epoch192 train_loss=228.88552856445312; test_loss=466.4360656738281
Epoch193 train_loss=266.5098876953125; test_loss=362.02886962890625
Epoch194 train_loss=404.68475341796875; test_loss=492.0688781738281
Epoch195 train_loss=423.05438232421875; test_loss=370.3533935546875
Epoch196 train_loss=482.565673828125; test_loss=410.2372741699219
Epoch197 train_loss=344.82965087890625; test_loss=422.62628173828125
Epoch198 train_loss=744.4771728515625; test_loss=400.5494079589844
Epoch199 train_loss=768.9254150390625; test_loss=370.78118896484375
Epoch200 train_loss=206.92202758789062; test_loss=374.56744384765625
Epoch201 train_loss=201.3799591064453; test_loss=390.8028564453125
Epoch202 train_loss=205.02505493164062; test_loss=378.5395812988281
Epoch203 train_loss=80.27162170410156; test_loss=362.278076171875
Epoch204 train_loss=383.3375549316406; test_loss=371.5338439941406
Epoch205 train_loss=564.522705078125; test_loss=394.7876892089844
Epoch206 train_loss=126.38006591796875; test_loss=439.224853515625
Epoch207 train_loss=234.90606689453125; test_loss=539.894775390625
Epoch208 train_loss=644.6416015625; test_loss=520.4864501953125
Epoch209 train_loss=382.53594970703125; test_loss=469.85406494140625
Epoch210 train_loss=261.53997802734375; test_loss=539.727783203125
Epoch211 train_loss=366.97198486328125; test_loss=553.4786376953125
Epoch212 train_loss=349.7514343261719; test_loss=524.8785400390625
Epoch213 train_loss=619.3818969726562; test_loss=540.2440795898438
Epoch214 train_loss=501.613525390625; test_loss=536.5728759765625
Epoch215 train_loss=674.4666137695312; test_loss=533.7216796875
Epoch216 train_loss=252.0653839111328; test_loss=529.68603515625
Epoch217 train_loss=329.62750244140625; test_loss=519.9722900390625
Epoch218 train_loss=478.01300048828125; test_loss=522.797119140625
Epoch219 train_loss=325.8164367675781; test_loss=532.363525390625
Epoch220 train_loss=350.989990234375; test_loss=524.9464111328125
Epoch221 train_loss=484.0020751953125; test_loss=530.2752075195312
Epoch222 train_loss=327.4801025390625; test_loss=544.4475708007812
Epoch223 train_loss=534.5032348632812; test_loss=526.257080078125
Epoch224 train_loss=598.8815307617188; test_loss=632.3544311523438
Epoch225 train_loss=660.5484008789062; test_loss=526.4053344726562
Epoch226 train_loss=375.8337097167969; test_loss=553.8766479492188
Epoch227 train_loss=687.7307739257812; test_loss=519.244140625
Epoch228 train_loss=579.3404541015625; test_loss=520.2059936523438
Epoch229 train_loss=377.0467224121094; test_loss=534.8837890625
Epoch230 train_loss=679.75; test_loss=519.2982177734375
Epoch231 train_loss=528.2332763671875; test_loss=520.0702514648438
Epoch232 train_loss=1010.25; test_loss=521.160888671875
Epoch233 train_loss=492.63055419921875; test_loss=524.9179077148438
Epoch234 train_loss=423.810546875; test_loss=519.896484375
Epoch235 train_loss=525.2061767578125; test_loss=536.7009887695312
Epoch236 train_loss=670.5545043945312; test_loss=527.4848022460938
Epoch237 train_loss=980.6278686523438; test_loss=524.8250122070312
Epoch238 train_loss=525.1713256835938; test_loss=524.6900024414062
Epoch239 train_loss=357.7987060546875; test_loss=522.62890625
Epoch240 train_loss=650.25; test_loss=518.2640991210938
Epoch241 train_loss=521.5648193359375; test_loss=536.5042724609375
Epoch242 train_loss=454.58343505859375; test_loss=521.1510009765625
Epoch243 train_loss=866.9366455078125; test_loss=521.5259399414062
Epoch244 train_loss=431.0274353027344; test_loss=522.5408325195312
Epoch245 train_loss=556.522705078125; test_loss=521.03173828125
Epoch246 train_loss=165.99339294433594; test_loss=532.7368774414062
Epoch247 train_loss=247.61927795410156; test_loss=522.688720703125
Epoch248 train_loss=975.695068359375; test_loss=532.190673828125
Epoch249 train_loss=553.1943969726562; test_loss=537.54052734375
Epoch250 train_loss=492.6214294433594; test_loss=523.9541015625
Epoch251 train_loss=430.94390869140625; test_loss=517.8563842773438
Epoch252 train_loss=583.3251953125; test_loss=514.9044189453125
Epoch253 train_loss=597.4065551757812; test_loss=531.620849609375
Epoch254 train_loss=635.6248779296875; test_loss=519.6013793945312
Epoch255 train_loss=251.3543701171875; test_loss=529.863525390625
Epoch256 train_loss=742.8682861328125; test_loss=581.5658569335938
Epoch257 train_loss=398.1937561035156; test_loss=528.1688232421875
Epoch258 train_loss=539.3475341796875; test_loss=536.8089599609375
Epoch259 train_loss=227.1748046875; test_loss=528.0178833007812
Epoch260 train_loss=361.01171875; test_loss=523.4151000976562
Epoch261 train_loss=832.2201538085938; test_loss=545.8639526367188
Epoch262 train_loss=855.3173217773438; test_loss=516.0585327148438
Epoch263 train_loss=964.3497314453125; test_loss=526.7501831054688
Epoch264 train_loss=354.92138671875; test_loss=523.1618041992188
Epoch265 train_loss=506.9691162109375; test_loss=526.7090454101562
Epoch266 train_loss=495.45013427734375; test_loss=528.043212890625
Epoch267 train_loss=683.9804077148438; test_loss=536.6657104492188
Epoch268 train_loss=379.5; test_loss=524.66552734375
Epoch269 train_loss=505.57037353515625; test_loss=523.9575805664062
Epoch270 train_loss=501.0455322265625; test_loss=533.8168334960938
Epoch271 train_loss=271.60784912109375; test_loss=524.5756225585938
Epoch272 train_loss=503.9358215332031; test_loss=526.8121337890625
Epoch273 train_loss=211.74310302734375; test_loss=533.1124877929688
Epoch274 train_loss=595.2235717773438; test_loss=521.2098999023438
Epoch275 train_loss=746.3377685546875; test_loss=518.741455078125
Epoch276 train_loss=576.1948852539062; test_loss=517.9251098632812
Epoch277 train_loss=553.6038208007812; test_loss=538.4288330078125
Epoch278 train_loss=430.1765441894531; test_loss=518.4835205078125
Epoch279 train_loss=937.25; test_loss=523.01025390625
Epoch280 train_loss=481.7212219238281; test_loss=519.6084594726562
Epoch281 train_loss=519.1754150390625; test_loss=527.2041015625
Epoch282 train_loss=466.422119140625; test_loss=517.3302001953125
Epoch283 train_loss=385.2859191894531; test_loss=520.145263671875
Epoch284 train_loss=184.27243041992188; test_loss=521.5215454101562
Epoch285 train_loss=791.8680419921875; test_loss=560.4234008789062
Epoch286 train_loss=821.0; test_loss=523.5335083007812
Epoch287 train_loss=768.91748046875; test_loss=518.1443481445312
Epoch288 train_loss=415.4494323730469; test_loss=519.9690551757812
Epoch289 train_loss=511.9181823730469; test_loss=520.8760986328125
Epoch290 train_loss=439.969482421875; test_loss=518.0985717773438
Epoch291 train_loss=607.3568115234375; test_loss=524.6799926757812
Epoch292 train_loss=80.35491943359375; test_loss=531.91552734375
Epoch293 train_loss=907.1826782226562; test_loss=535.6408081054688
Epoch294 train_loss=306.4442138671875; test_loss=544.322509765625
Epoch295 train_loss=426.33837890625; test_loss=518.8511962890625
Epoch296 train_loss=197.74343872070312; test_loss=534.4188842773438
Epoch297 train_loss=597.941650390625; test_loss=516.8783569335938
Epoch298 train_loss=797.1520385742188; test_loss=530.6429443359375
Epoch299 train_loss=552.298828125; test_loss=524.1263427734375
Epoch300 train_loss=403.7677001953125; test_loss=522.2687377929688
Epoch301 train_loss=713.191650390625; test_loss=526.2670288085938
Epoch302 train_loss=459.6029357910156; test_loss=516.5555419921875
Epoch303 train_loss=1041.99609375; test_loss=531.942626953125
Epoch304 train_loss=672.643310546875; test_loss=519.136962890625
Epoch305 train_loss=638.8040161132812; test_loss=515.1990966796875
Epoch306 train_loss=678.75; test_loss=523.1697387695312
Epoch307 train_loss=874.25; test_loss=523.1497802734375
Epoch308 train_loss=420.5; test_loss=553.357666015625
Epoch309 train_loss=284.850341796875; test_loss=526.0503540039062
Epoch310 train_loss=534.6943359375; test_loss=554.6220092773438
Epoch311 train_loss=438.6484680175781; test_loss=520.4447021484375
Epoch312 train_loss=595.9619140625; test_loss=515.35400390625
Epoch313 train_loss=568.3428344726562; test_loss=532.7429809570312
Epoch314 train_loss=477.6668395996094; test_loss=514.450927734375
Epoch315 train_loss=612.4603881835938; test_loss=523.7842407226562
Epoch316 train_loss=744.3536376953125; test_loss=520.9994506835938
Epoch317 train_loss=507.5311584472656; test_loss=515.4358520507812
Epoch318 train_loss=455.1343688964844; test_loss=515.4271240234375
Epoch319 train_loss=522.0; test_loss=519.2881469726562
Epoch320 train_loss=816.10546875; test_loss=516.7326049804688
Epoch321 train_loss=326.5760803222656; test_loss=518.8400268554688
Epoch322 train_loss=590.7154541015625; test_loss=527.2236328125
Epoch323 train_loss=639.5; test_loss=528.482421875
Epoch324 train_loss=554.805419921875; test_loss=521.5934448242188
Epoch325 train_loss=360.091552734375; test_loss=519.7314453125
Epoch326 train_loss=350.8770751953125; test_loss=522.547607421875
Epoch327 train_loss=377.1694641113281; test_loss=517.6067504882812
Epoch328 train_loss=663.0979614257812; test_loss=548.2212524414062
Epoch329 train_loss=391.79351806640625; test_loss=520.8407592773438
Epoch330 train_loss=440.5549621582031; test_loss=514.1939086914062
Epoch331 train_loss=711.8133544921875; test_loss=516.4276123046875
Epoch332 train_loss=666.630126953125; test_loss=518.40283203125
Epoch333 train_loss=507.7557373046875; test_loss=514.6708984375
Epoch334 train_loss=371.93145751953125; test_loss=542.2728881835938
Epoch335 train_loss=177.92425537109375; test_loss=526.1931762695312
Epoch336 train_loss=344.3946533203125; test_loss=545.67431640625
Epoch337 train_loss=338.7923583984375; test_loss=533.2821044921875
Epoch338 train_loss=560.7410278320312; test_loss=518.7297973632812
Epoch339 train_loss=424.8748779296875; test_loss=530.7642822265625
Epoch340 train_loss=626.4400024414062; test_loss=517.9771728515625
Epoch341 train_loss=610.56005859375; test_loss=517.686767578125
Epoch342 train_loss=363.16680908203125; test_loss=519.049072265625
Epoch343 train_loss=832.8521728515625; test_loss=517.1979370117188
Epoch344 train_loss=692.87939453125; test_loss=517.0403442382812
Epoch345 train_loss=851.29345703125; test_loss=540.8184814453125
Epoch346 train_loss=578.408203125; test_loss=513.7423095703125
Epoch347 train_loss=568.5574951171875; test_loss=524.8818359375
Epoch348 train_loss=676.6401977539062; test_loss=535.5308227539062
Epoch349 train_loss=846.9998779296875; test_loss=523.6387329101562
Epoch350 train_loss=370.050048828125; test_loss=528.2078857421875
Epoch351 train_loss=432.6827392578125; test_loss=517.1715087890625
Epoch352 train_loss=779.4449462890625; test_loss=514.0576782226562
Epoch353 train_loss=715.210693359375; test_loss=524.4285888671875
Epoch354 train_loss=546.9788208007812; test_loss=520.766845703125
Epoch355 train_loss=693.1159057617188; test_loss=518.6891479492188
Epoch356 train_loss=506.0; test_loss=530.19921875
Epoch357 train_loss=311.18524169921875; test_loss=567.120849609375
Epoch358 train_loss=335.0; test_loss=514.4109497070312
Epoch359 train_loss=771.25; test_loss=525.4487915039062
Epoch360 train_loss=400.3424072265625; test_loss=516.5567626953125
Epoch361 train_loss=386.4358825683594; test_loss=516.7737426757812
Epoch362 train_loss=649.8878173828125; test_loss=516.9108276367188
Epoch363 train_loss=459.4631652832031; test_loss=527.2938842773438
Epoch364 train_loss=452.1565246582031; test_loss=530.1830444335938
Epoch365 train_loss=489.6092834472656; test_loss=513.9402465820312
Epoch366 train_loss=582.0409545898438; test_loss=521.5117797851562
Epoch367 train_loss=533.504638671875; test_loss=524.9617309570312
Epoch368 train_loss=893.5; test_loss=513.58740234375
Epoch369 train_loss=496.58203125; test_loss=529.0856323242188
Epoch370 train_loss=502.4562683105469; test_loss=528.4248046875
Epoch371 train_loss=308.04144287109375; test_loss=515.6224365234375
Epoch372 train_loss=503.79498291015625; test_loss=517.298583984375
Epoch373 train_loss=508.9002685546875; test_loss=515.0775756835938
Epoch374 train_loss=612.071533203125; test_loss=517.9794921875
Epoch375 train_loss=782.1046142578125; test_loss=539.5902709960938
Epoch376 train_loss=428.49969482421875; test_loss=522.8658447265625
Epoch377 train_loss=578.3284912109375; test_loss=515.2936401367188
Epoch378 train_loss=411.0343933105469; test_loss=516.8219604492188
Epoch379 train_loss=544.0474853515625; test_loss=517.273193359375
Epoch380 train_loss=335.23016357421875; test_loss=517.3096923828125
Epoch381 train_loss=693.691650390625; test_loss=522.6599731445312
Epoch382 train_loss=875.2633666992188; test_loss=526.7559814453125
Epoch383 train_loss=619.2339477539062; test_loss=525.08544921875
Epoch384 train_loss=291.04449462890625; test_loss=514.1336059570312
Epoch385 train_loss=350.9857177734375; test_loss=517.552978515625
Epoch386 train_loss=300.8140869140625; test_loss=525.8464965820312
Epoch387 train_loss=273.72662353515625; test_loss=517.2315063476562
Epoch388 train_loss=561.6517333984375; test_loss=521.3978271484375
Epoch389 train_loss=373.04461669921875; test_loss=515.5313720703125
Epoch390 train_loss=653.3338012695312; test_loss=542.8701782226562
Epoch391 train_loss=668.8719482421875; test_loss=521.706787109375
Epoch392 train_loss=815.078125; test_loss=516.7345581054688
Epoch393 train_loss=722.999267578125; test_loss=515.023681640625
Epoch394 train_loss=429.12896728515625; test_loss=512.61474609375
Epoch395 train_loss=1266.0; test_loss=514.1939086914062
Epoch396 train_loss=590.56640625; test_loss=518.0146484375
Epoch397 train_loss=792.8927612304688; test_loss=527.9410400390625
Epoch398 train_loss=719.5; test_loss=517.6015625
Epoch399 train_loss=408.8865966796875; test_loss=524.76025390625
Epoch400 train_loss=485.6746826171875; test_loss=514.754150390625
Epoch401 train_loss=802.0614013671875; test_loss=515.1174926757812
Epoch402 train_loss=736.3558959960938; test_loss=521.2772216796875
Epoch403 train_loss=364.4532470703125; test_loss=543.7721557617188
Epoch404 train_loss=437.6103515625; test_loss=521.974609375
Epoch405 train_loss=615.9281005859375; test_loss=531.4989624023438
Epoch406 train_loss=246.7747802734375; test_loss=518.4337158203125
Epoch407 train_loss=435.8287658691406; test_loss=512.3893432617188
Epoch408 train_loss=575.775146484375; test_loss=514.3208618164062
Epoch409 train_loss=778.2816162109375; test_loss=519.813720703125
Epoch410 train_loss=664.435791015625; test_loss=515.0470581054688
Epoch411 train_loss=283.259521484375; test_loss=533.0140991210938
Epoch412 train_loss=254.40670776367188; test_loss=514.893798828125
Epoch413 train_loss=578.251953125; test_loss=519.3262939453125
Epoch414 train_loss=498.1705322265625; test_loss=513.6082153320312
Epoch415 train_loss=786.7908325195312; test_loss=520.30029296875
Epoch416 train_loss=192.01060485839844; test_loss=520.197021484375
Epoch417 train_loss=736.25; test_loss=515.9095458984375
Epoch418 train_loss=310.0494384765625; test_loss=519.0281372070312
Epoch419 train_loss=346.4100646972656; test_loss=522.78076171875
Epoch420 train_loss=494.40924072265625; test_loss=521.426025390625
Epoch421 train_loss=569.93310546875; test_loss=517.6007080078125
Epoch422 train_loss=652.3800048828125; test_loss=523.7261962890625
Epoch423 train_loss=663.0267944335938; test_loss=515.2310180664062
Epoch424 train_loss=716.7315673828125; test_loss=520.9696044921875
Epoch425 train_loss=897.4886474609375; test_loss=514.5723266601562
Epoch426 train_loss=739.9423217773438; test_loss=514.2978515625
Epoch427 train_loss=245.27821350097656; test_loss=523.8162841796875
Epoch428 train_loss=234.28004455566406; test_loss=513.7922973632812
Epoch429 train_loss=235.22901916503906; test_loss=514.7819213867188
Epoch430 train_loss=509.297607421875; test_loss=521.2713012695312
Epoch431 train_loss=467.2318115234375; test_loss=520.6516723632812
Epoch432 train_loss=571.1824951171875; test_loss=516.8743896484375
Epoch433 train_loss=540.9321899414062; test_loss=513.57275390625
Epoch434 train_loss=980.0; test_loss=520.9392700195312
Epoch435 train_loss=280.49603271484375; test_loss=513.0435180664062
Epoch436 train_loss=783.239990234375; test_loss=546.6339721679688
Epoch437 train_loss=381.3565673828125; test_loss=543.9536743164062
Epoch438 train_loss=736.109130859375; test_loss=515.4039306640625
Epoch439 train_loss=407.7358093261719; test_loss=520.406005859375
Epoch440 train_loss=456.76556396484375; test_loss=519.9171142578125
Epoch441 train_loss=331.8631591796875; test_loss=514.6734008789062
Epoch442 train_loss=864.0; test_loss=532.3313598632812
Epoch443 train_loss=932.2766723632812; test_loss=518.2339477539062
Epoch444 train_loss=332.50543212890625; test_loss=528.458251953125
Epoch445 train_loss=472.252685546875; test_loss=519.3052978515625
Epoch446 train_loss=685.0; test_loss=560.4680786132812
Epoch447 train_loss=435.92822265625; test_loss=516.4931030273438
Epoch448 train_loss=323.32330322265625; test_loss=525.451416015625
Epoch449 train_loss=387.7554016113281; test_loss=517.029052734375
Epoch450 train_loss=368.9699401855469; test_loss=514.1713256835938
Epoch451 train_loss=112.31657409667969; test_loss=526.405029296875
Epoch452 train_loss=659.2623291015625; test_loss=521.7272338867188
Epoch453 train_loss=553.3963012695312; test_loss=540.8348388671875
Epoch454 train_loss=865.7579956054688; test_loss=529.9682006835938
Epoch455 train_loss=316.1813659667969; test_loss=517.4395751953125
Epoch456 train_loss=605.9622802734375; test_loss=515.1375732421875
Epoch457 train_loss=326.25; test_loss=544.9706420898438
Epoch458 train_loss=466.0; test_loss=532.3545532226562
Epoch459 train_loss=428.858642578125; test_loss=513.5755615234375
Epoch460 train_loss=1001.0; test_loss=520.9703979492188
Epoch461 train_loss=498.278076171875; test_loss=518.6704711914062
Epoch462 train_loss=473.78436279296875; test_loss=519.3123779296875
Epoch463 train_loss=576.7055053710938; test_loss=518.4757690429688
Epoch464 train_loss=486.3822021484375; test_loss=518.0478515625
Epoch465 train_loss=768.0841064453125; test_loss=518.652587890625
Epoch466 train_loss=251.80142211914062; test_loss=514.8778076171875
Epoch467 train_loss=403.6661682128906; test_loss=523.0101318359375
Epoch468 train_loss=483.7471923828125; test_loss=515.2238159179688
Epoch469 train_loss=645.8297119140625; test_loss=513.8283081054688
Epoch470 train_loss=563.956298828125; test_loss=515.6226196289062
Epoch471 train_loss=593.75; test_loss=517.6285400390625
Epoch472 train_loss=627.2957763671875; test_loss=529.0841064453125
Epoch473 train_loss=336.136962890625; test_loss=525.2825927734375
Epoch474 train_loss=402.7308654785156; test_loss=529.727783203125
Epoch475 train_loss=556.178955078125; test_loss=522.9192504882812
Epoch476 train_loss=136.69180297851562; test_loss=512.7640991210938
Epoch477 train_loss=553.164794921875; test_loss=510.53924560546875
Epoch478 train_loss=543.2108764648438; test_loss=506.0005187988281
Epoch479 train_loss=246.1962127685547; test_loss=504.2373046875
Epoch480 train_loss=489.98748779296875; test_loss=508.0540771484375
Epoch481 train_loss=371.08734130859375; test_loss=502.1199645996094
Epoch482 train_loss=252.31959533691406; test_loss=500.2503662109375
Epoch483 train_loss=580.1043701171875; test_loss=500.2585144042969
Epoch484 train_loss=186.40789794921875; test_loss=497.82977294921875
Epoch485 train_loss=287.434326171875; test_loss=497.03118896484375
Epoch486 train_loss=183.21617126464844; test_loss=498.2108459472656
Epoch487 train_loss=551.8947143554688; test_loss=497.216064453125
Epoch488 train_loss=539.7901000976562; test_loss=489.3103332519531
Epoch489 train_loss=420.4890441894531; test_loss=486.38934326171875
Epoch490 train_loss=232.71881103515625; test_loss=486.1595458984375
Epoch491 train_loss=735.848388671875; test_loss=491.1759338378906
Epoch492 train_loss=129.77342224121094; test_loss=494.2226867675781
Epoch493 train_loss=364.0887451171875; test_loss=489.5952453613281
Epoch494 train_loss=406.6512145996094; test_loss=495.1360778808594
Epoch495 train_loss=386.1277160644531; test_loss=484.4741516113281
Epoch496 train_loss=191.53781127929688; test_loss=488.35443115234375
Epoch497 train_loss=235.40484619140625; test_loss=482.3476257324219
Epoch498 train_loss=882.2462158203125; test_loss=487.3484191894531
Epoch499 train_loss=902.625732421875; test_loss=483.58746337890625
Epoch500 train_loss=377.70294189453125; test_loss=538.1429443359375
Epoch501 train_loss=319.9187927246094; test_loss=528.2748413085938
Epoch502 train_loss=527.523681640625; test_loss=559.8433837890625
Epoch503 train_loss=727.666259765625; test_loss=526.9893798828125
Epoch504 train_loss=320.11541748046875; test_loss=536.5308227539062
Epoch505 train_loss=557.21435546875; test_loss=534.6486206054688
Epoch506 train_loss=435.3521423339844; test_loss=521.8282470703125
Epoch507 train_loss=477.13720703125; test_loss=518.7813110351562
Epoch508 train_loss=700.8783569335938; test_loss=524.3865966796875
Epoch509 train_loss=639.1759033203125; test_loss=544.204833984375
Epoch510 train_loss=547.742919921875; test_loss=527.331298828125
Epoch511 train_loss=812.1633911132812; test_loss=549.3839111328125
Epoch512 train_loss=281.4730224609375; test_loss=515.8173828125
Epoch513 train_loss=210.34625244140625; test_loss=533.0989990234375
Epoch514 train_loss=380.3983154296875; test_loss=515.4107666015625
Epoch515 train_loss=491.83001708984375; test_loss=514.8983154296875
Epoch516 train_loss=461.01873779296875; test_loss=523.5157470703125
Epoch517 train_loss=525.0366821289062; test_loss=522.684326171875
Epoch518 train_loss=305.82861328125; test_loss=516.0578002929688
Epoch519 train_loss=923.75; test_loss=518.6680297851562
Epoch520 train_loss=556.5; test_loss=566.7366943359375
Epoch521 train_loss=377.1761474609375; test_loss=521.07177734375
Epoch522 train_loss=376.2894287109375; test_loss=521.7029418945312
Epoch523 train_loss=411.24383544921875; test_loss=514.1643676757812
Epoch524 train_loss=925.344482421875; test_loss=515.89697265625
Epoch525 train_loss=500.72039794921875; test_loss=543.0316772460938
Epoch526 train_loss=691.882080078125; test_loss=513.3301391601562
Epoch527 train_loss=959.75; test_loss=515.631591796875
Epoch528 train_loss=324.08868408203125; test_loss=520.099609375
Epoch529 train_loss=710.878662109375; test_loss=517.6091918945312
Epoch530 train_loss=243.76718139648438; test_loss=526.08984375
Epoch531 train_loss=614.0; test_loss=513.6434936523438
Epoch532 train_loss=651.9431762695312; test_loss=514.0519409179688
Epoch533 train_loss=559.25; test_loss=510.8013000488281
Epoch534 train_loss=114.24880981445312; test_loss=514.86865234375
Epoch535 train_loss=452.9695739746094; test_loss=530.6914672851562
Epoch536 train_loss=529.17578125; test_loss=542.7416381835938
Epoch537 train_loss=799.931640625; test_loss=522.7047119140625
Epoch538 train_loss=654.4337158203125; test_loss=557.3785400390625
Epoch539 train_loss=462.0550842285156; test_loss=517.8714599609375
Epoch540 train_loss=383.5; test_loss=513.2316284179688
Epoch541 train_loss=417.7677001953125; test_loss=518.6160888671875
Epoch542 train_loss=260.71234130859375; test_loss=520.1310424804688
Epoch543 train_loss=974.54541015625; test_loss=520.7059936523438
Epoch544 train_loss=528.7626953125; test_loss=518.6665649414062
Epoch545 train_loss=483.806396484375; test_loss=512.9824829101562
Epoch546 train_loss=308.32366943359375; test_loss=525.7474975585938
Epoch547 train_loss=126.07463073730469; test_loss=517.0438842773438
Epoch548 train_loss=675.25; test_loss=512.3948974609375
Epoch549 train_loss=711.0439453125; test_loss=518.5033569335938
Epoch550 train_loss=497.5413818359375; test_loss=520.9608154296875
Epoch551 train_loss=462.45501708984375; test_loss=512.7815551757812
Epoch552 train_loss=388.26702880859375; test_loss=527.584228515625
Epoch553 train_loss=842.2325439453125; test_loss=518.9375610351562
Epoch554 train_loss=598.78173828125; test_loss=515.64794921875
Epoch555 train_loss=618.2022094726562; test_loss=513.443359375
Epoch556 train_loss=680.75; test_loss=513.0840454101562
Epoch557 train_loss=670.1405029296875; test_loss=511.4398193359375
Epoch558 train_loss=582.2908935546875; test_loss=528.658447265625
Epoch559 train_loss=221.80899047851562; test_loss=553.7901000976562
Epoch560 train_loss=545.6937866210938; test_loss=511.62054443359375
Epoch561 train_loss=832.0; test_loss=526.5172729492188
Epoch562 train_loss=454.6326904296875; test_loss=510.92449951171875
Epoch563 train_loss=278.48980712890625; test_loss=520.9747314453125
Epoch564 train_loss=882.25; test_loss=517.9826049804688
Epoch565 train_loss=458.9747009277344; test_loss=543.5872192382812
Epoch566 train_loss=343.7262268066406; test_loss=516.9722900390625
Epoch567 train_loss=150.31387329101562; test_loss=516.631103515625
Epoch568 train_loss=220.0439453125; test_loss=520.7041015625
Epoch569 train_loss=813.312255859375; test_loss=517.0848388671875
Epoch570 train_loss=830.0; test_loss=521.7202758789062
Epoch571 train_loss=771.459228515625; test_loss=511.7698669433594
Epoch572 train_loss=952.746826171875; test_loss=532.6212158203125
Epoch573 train_loss=513.8869018554688; test_loss=515.88525390625
Epoch574 train_loss=1133.0; test_loss=514.25341796875
Epoch575 train_loss=939.07568359375; test_loss=517.298583984375
Epoch576 train_loss=360.5694580078125; test_loss=515.1910400390625
Epoch577 train_loss=681.357666015625; test_loss=519.7107543945312
Epoch578 train_loss=422.85699462890625; test_loss=511.475830078125
Epoch579 train_loss=414.34906005859375; test_loss=530.9723510742188
Epoch580 train_loss=641.0426635742188; test_loss=518.6688232421875
Epoch581 train_loss=569.8748168945312; test_loss=515.498779296875
Epoch582 train_loss=332.0549011230469; test_loss=522.3612060546875
Epoch583 train_loss=232.63571166992188; test_loss=526.2056884765625
Epoch584 train_loss=352.13232421875; test_loss=511.7146301269531
Epoch585 train_loss=435.2261047363281; test_loss=516.2454223632812
Epoch586 train_loss=611.5744018554688; test_loss=512.02685546875
Epoch587 train_loss=726.2589111328125; test_loss=511.4444580078125
Epoch588 train_loss=701.3827514648438; test_loss=536.9869384765625
Epoch589 train_loss=640.8273315429688; test_loss=531.2850952148438
Epoch590 train_loss=121.77593994140625; test_loss=513.2801513671875
Epoch591 train_loss=1044.814453125; test_loss=513.9363403320312
Epoch592 train_loss=690.225830078125; test_loss=514.4082641601562
Epoch593 train_loss=381.0334777832031; test_loss=525.7958984375
Epoch594 train_loss=698.5559692382812; test_loss=509.8252258300781
Epoch595 train_loss=412.90447998046875; test_loss=530.3673706054688
Epoch596 train_loss=680.88671875; test_loss=511.9759216308594
Epoch597 train_loss=707.5223999023438; test_loss=536.7576904296875
Epoch598 train_loss=345.0473327636719; test_loss=525.8363037109375
Epoch599 train_loss=575.2453002929688; test_loss=518.630615234375
Epoch600 train_loss=1344.2880859375; test_loss=513.8590087890625
Epoch601 train_loss=842.9330444335938; test_loss=514.4913940429688
Epoch602 train_loss=340.611328125; test_loss=512.8495483398438
Epoch603 train_loss=132.54910278320312; test_loss=536.6294555664062
Epoch604 train_loss=682.6018676757812; test_loss=528.724853515625
Epoch605 train_loss=306.99908447265625; test_loss=511.5948181152344
Epoch606 train_loss=317.6871337890625; test_loss=518.5014038085938
Epoch607 train_loss=338.16265869140625; test_loss=512.3753662109375
Epoch608 train_loss=352.43389892578125; test_loss=516.5098266601562
Epoch609 train_loss=430.70611572265625; test_loss=514.3783569335938
Epoch610 train_loss=441.6463928222656; test_loss=513.3530883789062
Epoch611 train_loss=450.11541748046875; test_loss=511.1791687011719
Epoch612 train_loss=180.78111267089844; test_loss=513.9892578125
Epoch613 train_loss=465.63433837890625; test_loss=521.3837280273438
Epoch614 train_loss=893.7520751953125; test_loss=552.6226196289062
Epoch615 train_loss=658.4812622070312; test_loss=529.066650390625
Epoch616 train_loss=504.4781188964844; test_loss=512.747314453125
Epoch617 train_loss=938.3854370117188; test_loss=511.2577819824219
Epoch618 train_loss=706.5; test_loss=509.6548156738281
Epoch619 train_loss=706.9281005859375; test_loss=541.8804321289062
Epoch620 train_loss=1231.170166015625; test_loss=521.23193359375
Epoch621 train_loss=424.40557861328125; test_loss=510.6138000488281
Epoch622 train_loss=582.3065795898438; test_loss=528.8154907226562
Epoch623 train_loss=408.058837890625; test_loss=512.2669677734375
Epoch624 train_loss=310.43292236328125; test_loss=521.7711791992188
Epoch625 train_loss=431.0233154296875; test_loss=512.3556518554688
Epoch626 train_loss=591.7088623046875; test_loss=512.5831298828125
Epoch627 train_loss=407.7876281738281; test_loss=518.307861328125
Epoch628 train_loss=857.75; test_loss=530.6083374023438
Epoch629 train_loss=376.1976318359375; test_loss=535.2678833007812
Epoch630 train_loss=715.282470703125; test_loss=514.6012573242188
Epoch631 train_loss=483.7952575683594; test_loss=570.2225952148438
Epoch632 train_loss=470.8789367675781; test_loss=523.00732421875
Epoch633 train_loss=487.289306640625; test_loss=511.658935546875
Epoch634 train_loss=528.94287109375; test_loss=543.2831420898438
Epoch635 train_loss=516.8772583007812; test_loss=512.224609375
Epoch636 train_loss=606.75; test_loss=523.0364990234375
Epoch637 train_loss=427.403076171875; test_loss=520.835205078125
Epoch638 train_loss=1065.4736328125; test_loss=514.6734008789062
Epoch639 train_loss=777.25; test_loss=517.9111938476562
Epoch640 train_loss=849.91015625; test_loss=524.8929443359375
Epoch641 train_loss=630.75; test_loss=511.4472351074219
Epoch642 train_loss=605.75; test_loss=531.33154296875
Epoch643 train_loss=763.4984130859375; test_loss=516.953369140625
Epoch644 train_loss=436.1119384765625; test_loss=522.3284912109375
Epoch645 train_loss=476.6951904296875; test_loss=514.652099609375
Epoch646 train_loss=921.7659912109375; test_loss=513.3853759765625
Epoch647 train_loss=411.37542724609375; test_loss=518.8912963867188
Epoch648 train_loss=996.25; test_loss=514.6561279296875
Epoch649 train_loss=705.6853637695312; test_loss=511.859619140625
Epoch650 train_loss=622.5314331054688; test_loss=512.4096069335938
Epoch651 train_loss=330.25; test_loss=529.425537109375
Epoch652 train_loss=592.9176025390625; test_loss=511.5744934082031
Epoch653 train_loss=278.9854431152344; test_loss=520.7196044921875
Epoch654 train_loss=444.2972106933594; test_loss=536.0333251953125
Epoch655 train_loss=583.2457885742188; test_loss=523.490478515625
Epoch656 train_loss=861.292724609375; test_loss=513.9010620117188
Epoch657 train_loss=484.26959228515625; test_loss=528.354248046875
Epoch658 train_loss=432.06353759765625; test_loss=515.0648803710938
Epoch659 train_loss=492.53997802734375; test_loss=516.0142822265625
Epoch660 train_loss=571.2703247070312; test_loss=514.951416015625
Epoch661 train_loss=857.6270751953125; test_loss=524.90771484375
Epoch662 train_loss=319.11126708984375; test_loss=511.06866455078125
Epoch663 train_loss=511.3167419433594; test_loss=514.6923217773438
Epoch664 train_loss=549.0; test_loss=512.22216796875
Epoch665 train_loss=531.53271484375; test_loss=509.30157470703125
Epoch666 train_loss=349.4307861328125; test_loss=510.9725036621094
Epoch667 train_loss=440.02923583984375; test_loss=523.0972900390625
Epoch668 train_loss=508.67633056640625; test_loss=508.8753356933594
Epoch669 train_loss=207.76441955566406; test_loss=512.0380249023438
Epoch670 train_loss=793.3749389648438; test_loss=517.2449340820312
Epoch671 train_loss=576.9173583984375; test_loss=516.8203125
Epoch672 train_loss=510.40252685546875; test_loss=515.9448852539062
Epoch673 train_loss=459.51287841796875; test_loss=510.7784118652344
Epoch674 train_loss=581.320068359375; test_loss=515.6703491210938
Epoch675 train_loss=611.8341064453125; test_loss=515.3827514648438
Epoch676 train_loss=665.836181640625; test_loss=515.740478515625
Epoch677 train_loss=731.190185546875; test_loss=514.14697265625
Epoch678 train_loss=401.52435302734375; test_loss=530.2698364257812
Epoch679 train_loss=235.06784057617188; test_loss=517.4402465820312
Epoch680 train_loss=150.54359436035156; test_loss=521.1063232421875
Epoch681 train_loss=301.03271484375; test_loss=513.3522338867188
Epoch682 train_loss=114.98220825195312; test_loss=516.959228515625
Epoch683 train_loss=982.8804931640625; test_loss=509.8345947265625
Epoch684 train_loss=623.7377319335938; test_loss=512.1283569335938
Epoch685 train_loss=597.9011840820312; test_loss=510.0518493652344
Epoch686 train_loss=456.0683898925781; test_loss=528.1317749023438
Epoch687 train_loss=511.359130859375; test_loss=519.9028930664062
Epoch688 train_loss=646.46728515625; test_loss=510.17486572265625
Epoch689 train_loss=765.6775512695312; test_loss=527.486083984375
Epoch690 train_loss=310.77197265625; test_loss=509.77545166015625
Epoch691 train_loss=978.75; test_loss=519.8004760742188
Epoch692 train_loss=589.5; test_loss=546.4126586914062
Epoch693 train_loss=815.756103515625; test_loss=511.8193054199219
Epoch694 train_loss=331.0625; test_loss=513.294189453125
Epoch695 train_loss=586.8955078125; test_loss=512.1576538085938
Epoch696 train_loss=505.66021728515625; test_loss=514.371826171875
Epoch697 train_loss=878.8245849609375; test_loss=513.1948852539062
Epoch698 train_loss=265.2819519042969; test_loss=517.3303833007812
Epoch699 train_loss=392.29766845703125; test_loss=572.521728515625
Epoch700 train_loss=672.2848510742188; test_loss=536.9030151367188
Epoch701 train_loss=388.52264404296875; test_loss=513.8547973632812
Epoch702 train_loss=1027.703369140625; test_loss=578.7478637695312
Epoch703 train_loss=810.7028198242188; test_loss=512.4813232421875
Epoch704 train_loss=380.3062744140625; test_loss=511.7311096191406
Epoch705 train_loss=437.2685546875; test_loss=514.03515625
Epoch706 train_loss=383.3165283203125; test_loss=521.9103393554688
Epoch707 train_loss=403.13861083984375; test_loss=520.88720703125
Epoch708 train_loss=543.5343017578125; test_loss=517.983154296875
Epoch709 train_loss=338.595703125; test_loss=515.7548217773438
Epoch710 train_loss=749.845947265625; test_loss=518.542724609375
Epoch711 train_loss=649.5921630859375; test_loss=511.7200012207031
Epoch712 train_loss=333.6147155761719; test_loss=509.9482727050781
Epoch713 train_loss=241.3286590576172; test_loss=525.274169921875
Epoch714 train_loss=634.5576782226562; test_loss=517.0426635742188
Epoch715 train_loss=422.3252868652344; test_loss=509.4622802734375
Epoch716 train_loss=550.8341674804688; test_loss=515.0997924804688
Epoch717 train_loss=602.93994140625; test_loss=516.4996948242188
Epoch718 train_loss=324.4453125; test_loss=510.38916015625
Epoch719 train_loss=379.04071044921875; test_loss=533.1654663085938
Epoch720 train_loss=494.782470703125; test_loss=512.7127075195312
Epoch721 train_loss=425.3116455078125; test_loss=519.7697143554688
Epoch722 train_loss=360.71624755859375; test_loss=556.7233276367188
Epoch723 train_loss=229.7537841796875; test_loss=519.9020385742188
Epoch724 train_loss=575.80615234375; test_loss=511.02703857421875
Epoch725 train_loss=611.5945434570312; test_loss=511.2038879394531
Epoch726 train_loss=649.1785888671875; test_loss=512.7918701171875
Epoch727 train_loss=611.87353515625; test_loss=513.3386840820312
Epoch728 train_loss=682.0203247070312; test_loss=514.5264282226562
Epoch729 train_loss=512.4747314453125; test_loss=522.63916015625
Epoch730 train_loss=461.66314697265625; test_loss=516.2681884765625
Epoch731 train_loss=794.1051635742188; test_loss=510.2592468261719
Epoch732 train_loss=479.2327575683594; test_loss=513.0535888671875
Epoch733 train_loss=298.087890625; test_loss=527.1427001953125
Epoch734 train_loss=226.05319213867188; test_loss=514.591064453125
Epoch735 train_loss=694.0697631835938; test_loss=511.97772216796875
Epoch736 train_loss=472.39361572265625; test_loss=515.693359375
Epoch737 train_loss=68.5; test_loss=516.6973266601562
Epoch738 train_loss=731.8543701171875; test_loss=512.03955078125
Epoch739 train_loss=430.0846862792969; test_loss=516.7550048828125
Epoch740 train_loss=390.0777893066406; test_loss=511.105224609375
Epoch741 train_loss=603.25; test_loss=510.02545166015625
Epoch742 train_loss=361.43603515625; test_loss=509.8844909667969
Epoch743 train_loss=339.36334228515625; test_loss=512.111572265625
Epoch744 train_loss=415.00286865234375; test_loss=513.93798828125
Epoch745 train_loss=578.9579467773438; test_loss=511.75714111328125
Epoch746 train_loss=427.352783203125; test_loss=543.9906616210938
Epoch747 train_loss=752.0804443359375; test_loss=512.051025390625
Epoch748 train_loss=787.8602294921875; test_loss=531.4830322265625
Epoch749 train_loss=706.892822265625; test_loss=509.93072509765625
Epoch750 train_loss=863.9758911132812; test_loss=509.42230224609375
Epoch751 train_loss=759.25; test_loss=532.5347900390625
Epoch752 train_loss=382.11767578125; test_loss=513.1520385742188
Epoch753 train_loss=456.20556640625; test_loss=508.8895263671875
Epoch754 train_loss=989.3438720703125; test_loss=512.5418701171875
Epoch755 train_loss=245.21331787109375; test_loss=511.4166564941406
Epoch756 train_loss=592.58203125; test_loss=512.3443603515625
Epoch757 train_loss=400.1097412109375; test_loss=509.6563415527344
Epoch758 train_loss=554.771484375; test_loss=521.3265380859375
Epoch759 train_loss=210.1686248779297; test_loss=510.639892578125
Epoch760 train_loss=415.46270751953125; test_loss=519.5111083984375
Epoch761 train_loss=660.0172119140625; test_loss=571.0580444335938
Epoch762 train_loss=525.9686279296875; test_loss=544.587646484375
Epoch763 train_loss=589.3788452148438; test_loss=514.1404418945312
Epoch764 train_loss=312.62469482421875; test_loss=509.811767578125
Epoch765 train_loss=893.191162109375; test_loss=513.6244506835938
Epoch766 train_loss=462.49395751953125; test_loss=509.2339782714844
Epoch767 train_loss=266.4871520996094; test_loss=514.3861694335938
Epoch768 train_loss=655.154296875; test_loss=519.57568359375
Epoch769 train_loss=105.0589599609375; test_loss=516.9667358398438
Epoch770 train_loss=1070.724853515625; test_loss=511.3829650878906
Epoch771 train_loss=711.4437255859375; test_loss=510.7742614746094
Epoch772 train_loss=524.145751953125; test_loss=513.0020141601562
Epoch773 train_loss=487.05072021484375; test_loss=516.6436767578125
Epoch774 train_loss=754.8549194335938; test_loss=509.942138671875
Epoch775 train_loss=539.75; test_loss=513.4052734375
Epoch776 train_loss=668.7052001953125; test_loss=519.9042358398438
Epoch777 train_loss=513.5; test_loss=510.1986999511719
Epoch778 train_loss=417.5155334472656; test_loss=512.1958618164062
Epoch779 train_loss=837.75; test_loss=526.056640625
Epoch780 train_loss=700.5; test_loss=510.8919372558594
Epoch781 train_loss=727.75; test_loss=518.9532470703125
Epoch782 train_loss=374.92388916015625; test_loss=512.3055419921875
Epoch783 train_loss=586.5869140625; test_loss=514.3087158203125
Epoch784 train_loss=642.30712890625; test_loss=510.0063781738281
Epoch785 train_loss=522.5; test_loss=514.0708618164062
Epoch786 train_loss=700.3930053710938; test_loss=517.16650390625
Epoch787 train_loss=414.7911376953125; test_loss=510.2645263671875
Epoch788 train_loss=426.3205871582031; test_loss=510.0287170410156
Epoch789 train_loss=533.7823486328125; test_loss=533.5196533203125
Epoch790 train_loss=951.0316772460938; test_loss=508.4520263671875
Epoch791 train_loss=536.8409423828125; test_loss=511.2970275878906
Epoch792 train_loss=127.06507873535156; test_loss=509.4504699707031
Epoch793 train_loss=1075.0; test_loss=512.12353515625
Epoch794 train_loss=516.3702392578125; test_loss=515.0372924804688
Epoch795 train_loss=549.745849609375; test_loss=510.8525085449219
Epoch796 train_loss=679.1707153320312; test_loss=525.1571044921875
Epoch797 train_loss=421.15765380859375; test_loss=513.9559936523438
Epoch798 train_loss=646.6640014648438; test_loss=518.3958129882812
Epoch799 train_loss=263.9368896484375; test_loss=510.7995300292969
Epoch800 train_loss=511.5; test_loss=511.0493469238281
Epoch801 train_loss=453.23785400390625; test_loss=532.3349609375
Epoch802 train_loss=372.51641845703125; test_loss=513.50146484375
Epoch803 train_loss=653.0086669921875; test_loss=512.7945556640625
Epoch804 train_loss=671.8275146484375; test_loss=522.548583984375
Epoch805 train_loss=640.75; test_loss=509.7198181152344
Epoch806 train_loss=856.3778076171875; test_loss=519.5125732421875
Epoch807 train_loss=169.3826141357422; test_loss=511.2947692871094
Epoch808 train_loss=932.4674682617188; test_loss=511.24249267578125
Epoch809 train_loss=651.6552124023438; test_loss=515.2545166015625
Epoch810 train_loss=470.980224609375; test_loss=515.1408081054688
Epoch811 train_loss=763.75; test_loss=530.5714111328125
Epoch812 train_loss=963.8402709960938; test_loss=518.5930786132812
Epoch813 train_loss=386.7669677734375; test_loss=509.1327209472656
Epoch814 train_loss=626.29296875; test_loss=508.5506896972656
Epoch815 train_loss=545.9293823242188; test_loss=518.020751953125
Epoch816 train_loss=797.941162109375; test_loss=511.0039367675781
Epoch817 train_loss=251.09442138671875; test_loss=511.3768615722656
Epoch818 train_loss=414.69989013671875; test_loss=510.44305419921875
Epoch819 train_loss=684.3189086914062; test_loss=513.6982421875
Epoch820 train_loss=495.51055908203125; test_loss=521.927001953125
Epoch821 train_loss=402.74310302734375; test_loss=524.6427612304688
Epoch822 train_loss=592.2117919921875; test_loss=509.7654113769531
Epoch823 train_loss=483.39422607421875; test_loss=517.5902709960938
Epoch824 train_loss=522.64599609375; test_loss=520.6463623046875
Epoch825 train_loss=614.1091918945312; test_loss=549.751220703125
Epoch826 train_loss=294.7044982910156; test_loss=511.9527893066406
Epoch827 train_loss=538.1585693359375; test_loss=518.253173828125
Epoch828 train_loss=409.75433349609375; test_loss=511.1843566894531
Epoch829 train_loss=323.7706298828125; test_loss=510.7705993652344
Epoch830 train_loss=358.75; test_loss=512.0824584960938
Epoch831 train_loss=289.722412109375; test_loss=513.1376342773438
Epoch832 train_loss=153.29833984375; test_loss=514.977294921875
Epoch833 train_loss=748.5018920898438; test_loss=509.7088317871094
Epoch834 train_loss=380.63751220703125; test_loss=512.5252075195312
Epoch835 train_loss=715.4464111328125; test_loss=510.065185546875
Epoch836 train_loss=320.68902587890625; test_loss=508.1268615722656
Epoch837 train_loss=767.1292724609375; test_loss=518.2886962890625
Epoch838 train_loss=648.8970336914062; test_loss=606.4939575195312
Epoch839 train_loss=588.5802612304688; test_loss=521.2660522460938
Epoch840 train_loss=318.3489074707031; test_loss=510.4168395996094
Epoch841 train_loss=488.25; test_loss=533.4767456054688
Epoch842 train_loss=720.94091796875; test_loss=568.3336181640625
Epoch843 train_loss=648.8514404296875; test_loss=510.1580505371094
Epoch844 train_loss=779.6747436523438; test_loss=530.2589111328125
Epoch845 train_loss=306.8376770019531; test_loss=512.943603515625
Epoch846 train_loss=455.5049743652344; test_loss=515.8228759765625
Epoch847 train_loss=261.39544677734375; test_loss=509.3638916015625
Epoch848 train_loss=474.5965881347656; test_loss=512.398193359375
Epoch849 train_loss=395.4737548828125; test_loss=510.1218566894531
Epoch850 train_loss=316.200439453125; test_loss=511.3568115234375
Epoch851 train_loss=551.3676147460938; test_loss=511.15533447265625
Epoch852 train_loss=711.6058959960938; test_loss=520.3463134765625
Epoch853 train_loss=373.9122619628906; test_loss=515.289306640625
Epoch854 train_loss=296.730712890625; test_loss=509.765380859375
Epoch855 train_loss=696.1024780273438; test_loss=515.6919555664062
Epoch856 train_loss=354.8938903808594; test_loss=507.86907958984375
Epoch857 train_loss=399.1190185546875; test_loss=511.2862548828125
Epoch858 train_loss=526.5361938476562; test_loss=525.986083984375
Epoch859 train_loss=979.5; test_loss=514.3555297851562
Epoch860 train_loss=738.75; test_loss=509.4003601074219
Epoch861 train_loss=291.8285827636719; test_loss=515.224853515625
Epoch862 train_loss=722.384033203125; test_loss=511.19378662109375
Epoch863 train_loss=444.02130126953125; test_loss=514.0997314453125
Epoch864 train_loss=249.21315002441406; test_loss=513.8536376953125
Epoch865 train_loss=651.95849609375; test_loss=537.6544189453125
Epoch866 train_loss=701.6521606445312; test_loss=525.9580688476562
Epoch867 train_loss=442.5048828125; test_loss=519.4728393554688
Epoch868 train_loss=766.1121826171875; test_loss=514.737060546875
Epoch869 train_loss=442.549560546875; test_loss=511.4075012207031
Epoch870 train_loss=367.044189453125; test_loss=510.6329650878906
Epoch871 train_loss=383.4055480957031; test_loss=511.42388916015625
Epoch872 train_loss=516.5; test_loss=511.15875244140625
Epoch873 train_loss=526.21044921875; test_loss=510.57037353515625
Epoch874 train_loss=323.8515625; test_loss=519.8196411132812
Epoch875 train_loss=574.3020629882812; test_loss=519.2792358398438
Epoch876 train_loss=636.9649658203125; test_loss=508.6229553222656
Epoch877 train_loss=558.4620361328125; test_loss=548.066162109375
Epoch878 train_loss=1497.2305908203125; test_loss=513.9315795898438
Epoch879 train_loss=556.08203125; test_loss=511.3967590332031
Epoch880 train_loss=626.164794921875; test_loss=513.474609375
Epoch881 train_loss=308.1175537109375; test_loss=508.96258544921875
Epoch882 train_loss=567.1307373046875; test_loss=509.4193420410156
Epoch883 train_loss=562.9913330078125; test_loss=511.7369384765625
Epoch884 train_loss=853.0; test_loss=509.3885192871094
Epoch885 train_loss=234.13299560546875; test_loss=518.9844360351562
Epoch886 train_loss=766.916015625; test_loss=508.9683837890625
Epoch887 train_loss=565.65234375; test_loss=511.957763671875
Epoch888 train_loss=237.8590545654297; test_loss=509.3558654785156
Epoch889 train_loss=350.333251953125; test_loss=514.600341796875
Epoch890 train_loss=798.4775390625; test_loss=531.3975830078125
Epoch891 train_loss=658.0364990234375; test_loss=507.9197998046875
Epoch892 train_loss=301.1288757324219; test_loss=529.1892700195312
Epoch893 train_loss=526.2802734375; test_loss=509.7146301269531
Epoch894 train_loss=302.4924621582031; test_loss=509.4431457519531
Epoch895 train_loss=654.25; test_loss=527.6327514648438
Epoch896 train_loss=361.2104187011719; test_loss=513.2406616210938
Epoch897 train_loss=378.70880126953125; test_loss=513.6722412109375
Epoch898 train_loss=527.6363525390625; test_loss=513.8289184570312
Epoch899 train_loss=722.1800537109375; test_loss=514.2855834960938
Epoch900 train_loss=685.4700927734375; test_loss=529.0721435546875
Epoch901 train_loss=670.5; test_loss=513.4784545898438
Epoch902 train_loss=642.25; test_loss=508.7694396972656
Epoch903 train_loss=466.66290283203125; test_loss=510.4363098144531
Epoch904 train_loss=833.5131225585938; test_loss=514.8245239257812
Epoch905 train_loss=944.5; test_loss=529.4015502929688
Epoch906 train_loss=694.75; test_loss=517.7677612304688
Epoch907 train_loss=756.1366577148438; test_loss=547.0355834960938
Epoch908 train_loss=653.25; test_loss=511.86029052734375
Epoch909 train_loss=724.0301513671875; test_loss=510.2636413574219
Epoch910 train_loss=466.8385009765625; test_loss=510.77984619140625
Epoch911 train_loss=390.41461181640625; test_loss=512.9404907226562
Epoch912 train_loss=560.58984375; test_loss=508.4036560058594
Epoch913 train_loss=530.4576416015625; test_loss=527.766845703125
Epoch914 train_loss=265.60589599609375; test_loss=527.5225830078125
Epoch915 train_loss=536.115478515625; test_loss=517.3572998046875
Epoch916 train_loss=752.2492065429688; test_loss=510.75726318359375
Epoch917 train_loss=634.8507080078125; test_loss=520.8139038085938
Epoch918 train_loss=409.47027587890625; test_loss=523.9625854492188
Epoch919 train_loss=631.286376953125; test_loss=509.92425537109375
Epoch920 train_loss=192.91787719726562; test_loss=524.1420288085938
Epoch921 train_loss=298.9095458984375; test_loss=512.386962890625
Epoch922 train_loss=393.67510986328125; test_loss=518.5853271484375
Epoch923 train_loss=544.2467041015625; test_loss=512.032470703125
Epoch924 train_loss=522.6809692382812; test_loss=515.86328125
Epoch925 train_loss=871.5133666992188; test_loss=524.5964965820312
Epoch926 train_loss=357.609375; test_loss=511.78033447265625
Epoch927 train_loss=468.0078430175781; test_loss=509.978515625
Epoch928 train_loss=715.4075927734375; test_loss=530.3115234375
Epoch929 train_loss=520.9844970703125; test_loss=509.256103515625
Epoch930 train_loss=948.4535522460938; test_loss=521.152587890625
Epoch931 train_loss=488.75; test_loss=509.58740234375
Epoch932 train_loss=207.21707153320312; test_loss=523.3802490234375
Epoch933 train_loss=647.7152099609375; test_loss=519.235107421875
Epoch934 train_loss=625.5567626953125; test_loss=508.41204833984375
Epoch935 train_loss=761.2431640625; test_loss=512.2605590820312
Epoch936 train_loss=207.85816955566406; test_loss=525.0857543945312
Epoch937 train_loss=346.7274169921875; test_loss=523.2202758789062
Epoch938 train_loss=334.14739990234375; test_loss=516.2326049804688
Epoch939 train_loss=266.0731201171875; test_loss=515.7564697265625
Epoch940 train_loss=782.1122436523438; test_loss=529.0809936523438
Epoch941 train_loss=380.472412109375; test_loss=509.08453369140625
Epoch942 train_loss=469.22259521484375; test_loss=510.1913757324219
Epoch943 train_loss=580.046875; test_loss=511.6817626953125
Epoch944 train_loss=436.4432678222656; test_loss=512.352294921875
Epoch945 train_loss=591.9083862304688; test_loss=510.81829833984375
Epoch946 train_loss=605.0775146484375; test_loss=513.8308715820312
Epoch947 train_loss=979.5223388671875; test_loss=517.3121948242188
Epoch948 train_loss=375.05548095703125; test_loss=518.8837280273438
Epoch949 train_loss=411.916259765625; test_loss=512.3715209960938
Epoch950 train_loss=607.5; test_loss=510.1538391113281
Epoch951 train_loss=342.2864990234375; test_loss=520.51025390625
Epoch952 train_loss=499.999267578125; test_loss=523.456787109375
Epoch953 train_loss=399.1004333496094; test_loss=509.5325012207031
Epoch954 train_loss=389.9927978515625; test_loss=514.6261596679688
Epoch955 train_loss=665.22021484375; test_loss=527.0917358398438
Epoch956 train_loss=453.0; test_loss=508.4857482910156
Epoch957 train_loss=312.52264404296875; test_loss=507.3971252441406
Epoch958 train_loss=182.71243286132812; test_loss=508.71649169921875
Epoch959 train_loss=555.9562377929688; test_loss=550.913818359375
Epoch960 train_loss=644.8704833984375; test_loss=508.4523010253906
Epoch961 train_loss=1118.72802734375; test_loss=509.38934326171875
Epoch962 train_loss=154.5577392578125; test_loss=526.9310302734375
Epoch963 train_loss=436.0479736328125; test_loss=525.4364013671875
Epoch964 train_loss=572.7408447265625; test_loss=516.6483764648438
Epoch965 train_loss=276.98309326171875; test_loss=511.54681396484375
Epoch966 train_loss=570.984130859375; test_loss=510.8829650878906
Epoch967 train_loss=756.9221801757812; test_loss=512.8570556640625
Epoch968 train_loss=384.7109375; test_loss=512.73193359375
Epoch969 train_loss=1061.75; test_loss=534.60546875
Epoch970 train_loss=654.780029296875; test_loss=507.1831359863281
Epoch971 train_loss=572.7544555664062; test_loss=520.7056274414062
Epoch972 train_loss=848.9573974609375; test_loss=534.4089965820312
Epoch973 train_loss=367.5375671386719; test_loss=517.6083374023438
Epoch974 train_loss=409.8013916015625; test_loss=513.251953125
Epoch975 train_loss=615.3492431640625; test_loss=543.0680541992188
Epoch976 train_loss=234.5081787109375; test_loss=508.6007385253906
Epoch977 train_loss=198.557861328125; test_loss=513.3204345703125
Epoch978 train_loss=171.93975830078125; test_loss=519.9849243164062
Epoch979 train_loss=465.53094482421875; test_loss=510.6808776855469
Epoch980 train_loss=884.4794921875; test_loss=527.85009765625
Epoch981 train_loss=249.17465209960938; test_loss=525.4366455078125
Epoch982 train_loss=610.75; test_loss=516.5526123046875
Epoch983 train_loss=446.2960205078125; test_loss=512.2568969726562
Epoch984 train_loss=793.25; test_loss=520.337890625
Epoch985 train_loss=756.2145385742188; test_loss=512.7034912109375
Epoch986 train_loss=455.20611572265625; test_loss=526.2702026367188
Epoch987 train_loss=720.2510375976562; test_loss=511.3952331542969
Epoch988 train_loss=453.80194091796875; test_loss=527.4204711914062
Epoch989 train_loss=866.708740234375; test_loss=533.5161743164062
Epoch990 train_loss=642.3763427734375; test_loss=527.6524047851562
Epoch991 train_loss=581.3656005859375; test_loss=508.0131530761719
Epoch992 train_loss=607.2669677734375; test_loss=511.60638427734375
Epoch993 train_loss=374.46295166015625; test_loss=518.6779174804688
Epoch994 train_loss=706.1585083007812; test_loss=509.0665588378906
Epoch995 train_loss=512.8662109375; test_loss=517.9116821289062
Epoch996 train_loss=268.5616455078125; test_loss=513.5342407226562
Epoch997 train_loss=496.71484375; test_loss=510.5344543457031
Epoch998 train_loss=406.970703125; test_loss=513.8914794921875
Epoch999 train_loss=615.1522216796875; test_loss=519.458251953125
Epoch1000 train_loss=449.462646484375; test_loss=507.57232666015625
------------------------------------------- TRAINING SCORES -------------------------------------------
Overall MAE: 506.3859067382813 +/- 213.8931333902926
Overall RMSE: 614.7109517364502 +/- 249.68981121865465
Overall MAPE: 105.72283459234238 +/- 106.76437929482991
Overall R2: -146.1750120282173 +/- 1050.4377217347533
------------------------------------------- TESTING SCORES -------------------------------------------
Overall MAE: 496.89939270019534 +/- 48.49974129023237
Overall RMSE: 640.2378510131836 +/- 49.66673463202634
Overall MAPE: 101.05311183166503 +/- 19.11839109918788
Overall R2: 13.97423979640007 +/- 12.406959037785937
Wall time: 10min 11s
%%time
# Now we do 10-Fold CV on our DNN model with 45 vars:
# here are the best parameters for the 45 vars DNN hyperparameter search:
# lr: 0.006250169080446751
# batch_size: 16
# HL0_ac_fn: relu
# HL1_ac_fn: relu
# HL2_ac_fn: relu
# HL3_ac_fn: relu
# Sequential(
# (0): Linear(in_features=45, out_features=100, bias=True)
# (1): ReLU()
# (2): Linear(in_features=100, out_features=50, bias=True)
# (3): ReLU()
# (4): Linear(in_features=50, out_features=25, bias=True)
# (5): ReLU()
# (6): Linear(in_features=25, out_features=12, bias=True)
# (7): ReLU()
# (8): Linear(in_features=12, out_features=1, bias=True)
# )
metrics = [RootMeanSquaredError(), "mean_absolute_percentage_error",
"mean_absolute_error"]
num_folds = 10
kfold = KFold(n_splits=10, shuffle=True, random_state=1)
# we define the cross validator, and other variables:
lr = 0.006250169080446751
batch_size = 16
num_epochs = 100
num_folds = 10
kfold = KFold(n_splits=num_folds, shuffle=True, random_state=1)
fold_num = 1
MAPE_train = []
MAPE_scores = []
RMSE_train = []
RMSE_scores = []
MAE_train = []
MAE_scores = []
R2_train = []
R2_scores = []
low_MAPE_scores = []
low_RMSE_scores = []
low_MAE_scores = []
low_R2_scores = []
med_MAPE_scores = []
med_RMSE_scores = []
med_MAE_scores = []
med_R2_scores = []
high_MAPE_scores = []
high_RMSE_scores = []
high_MAE_scores = []
high_R2_scores = []
for train, test in kfold.split(X, Y):
# now our data is ready to go into our model.
model = Sequential([
Dense(100, activation='relu', input_shape=(X.shape[1],)),
Dense(50, activation='relu'),
Dense(25, activation='relu'),
Dense(12, activation='relu'),
Dense(1, activation='linear')
])
model.compile(loss='mae', optimizer=Adam(learning_rate=lr), metrics=metrics)
history = model.fit(X[train], Y[train], batch_size=16, epochs=100, verbose=False)
scores = model.evaluate(X[test], Y[test], verbose=False)
# we split the Y[test] into 3 inclusive volumes: low (0-299), medium (300-750), high (>= 751)
# here test and Y[test] are numpy ndarrays
# test has shape (675,), Y[test] has size around (675,45)
# Y[test][0] is the same as calling Y[test[0]] which has shape (45,)
# print("test.shape", test.shape)
# print(test)
# print("Y[test].shape",Y[test].shape)
# print(Y[test])
low_Y_test_mask = np.all([Y[test] >= 0, Y[test] <=299], axis=0)
med_Y_test_mask = np.all([Y[test] >= 300, Y[test] <=750], axis=0)
high_Y_test_mask = np.all([Y[test] >= 751],axis=0)
# print(low_Y_test_mask.shape)
# print(low_Y_test_mask)
# print("Y[test][low_Y_test_mask].shape",Y[test][low_Y_test_mask].shape)
# print(Y[test][low_Y_test_mask])
# print(med_Y_test_mask.shape)
# print(med_Y_test_mask)
# print("Y[test][med_Y_test_mask].shape",Y[test][med_Y_test_mask].shape)
# print(Y[test][med_Y_test_mask])
# print(high_Y_test_mask.shape)
# print(high_Y_test_mask)
# print("Y[test][high_Y_test_mask].shape",Y[test][high_Y_test_mask].shape)
# print(Y[test][high_Y_test_mask])
low_scores = model.evaluate(X[test][low_Y_test_mask], Y[test][low_Y_test_mask], verbose=False)
med_scores = model.evaluate(X[test][med_Y_test_mask], Y[test][med_Y_test_mask], verbose=False)
high_scores = model.evaluate(X[test][high_Y_test_mask], Y[test][high_Y_test_mask], verbose=False)
low_MAPE_scores.append(low_scores[2])
low_RMSE_scores.append(low_scores[1])
low_MAE_scores.append(low_scores[0])
low_R2_scores.append(r2_score(Y[test][low_Y_test_mask], model.predict(X[test][low_Y_test_mask])))
med_MAPE_scores.append(med_scores[2])
med_RMSE_scores.append(med_scores[1])
med_MAE_scores.append(med_scores[0])
med_R2_scores.append(r2_score(Y[test][med_Y_test_mask], model.predict(X[test][med_Y_test_mask])))
high_MAPE_scores.append(high_scores[2])
high_RMSE_scores.append(high_scores[1])
high_MAE_scores.append(high_scores[0])
high_R2_scores.append(r2_score(Y[test][high_Y_test_mask], model.predict(X[test][high_Y_test_mask])))
train_r2 = r2_score(Y[train], model.predict(X[train]))
R2_train.append(train_r2)
score_r2 = r2_score(Y[test], model.predict(X[test]))
R2_scores.append(score_r2)
MAPE_train.append(history.history.get(
'mean_absolute_percentage_error')[-1])
MAPE_scores.append(scores[2])
RMSE_train.append(history.history.get('root_mean_squared_error')[-1])
RMSE_scores.append(scores[1])
MAE_train.append(history.history.get('loss')[-1])
MAE_scores.append(scores[0])
print(
f"------------------------------------------Fold {fold_num}------------------------------------------")
print(
f"\tTrain Loss: {history.history.get('loss')[-1]}\tScore Loss: {scores[0]}")
print(
f"\tTrain RMSE: {history.history.get('root_mean_squared_error')[-1]}\tScore RMSE: {scores[1]}")
print(
f"\tTrain MAPE: {history.history.get('mean_absolute_percentage_error')[-1]}\tScore MAPE: {scores[2]}")
if scores[2] == 100.0:
print(history.history.get('mean_absolute_percentage_error'))
print(f"\tTrain R2: {train_r2}\tScore R2: {score_r2}")
print(f"\n\t3-VOLUME SCORES: LOW (n={Y[test][low_Y_test_mask].shape[0]}), MED (n={Y[test][med_Y_test_mask].shape[0]}), HIGH (n={Y[test][high_Y_test_mask].shape[0]}), TOTAL IN FOLD (n={test.shape[0]})")
print(f"\tLow Loss: {low_MAE_scores[-1]}\tMed Loss: {med_MAE_scores[-1]}\tHigh Loss: {high_MAE_scores[-1]}")
print(f"\tLow RMSE: {low_RMSE_scores[-1]}\tMed RMSE: {med_RMSE_scores[-1]}\tHigh RMSE: {high_RMSE_scores[-1]}")
print(f"\tLow MAPE: {low_MAPE_scores[-1]}\tMed MAPE: {med_MAPE_scores[-1]}\tHigh MAPE: {high_MAPE_scores[-1]}")
print(f"\tLow R2: {low_R2_scores[-1]}\tMed R2: {med_R2_scores[-1]}\tHigh R2: {high_R2_scores[-1]}")
fold_num += 1
print(f"\n\nLow MAE Average: {np.mean(low_MAE_scores)} +/- {np.std(low_MAE_scores)}")
print(f"Low RMSE Average: {np.mean(low_RMSE_scores)} +/- {np.std(low_RMSE_scores)}")
print(f"Low MAPE Average: {np.mean(low_MAPE_scores)} +/- {np.std(low_MAPE_scores)}")
print(f"Low R2 Average: {np.mean(low_R2_scores)*100} +/- {np.std(low_R2_scores)*100}\n")
print(f"\nMed MAE Average: {np.mean(med_MAE_scores)} +/- {np.std(med_MAE_scores)}")
print(f"Med RMSE Average: {np.mean(med_RMSE_scores)} +/- {np.std(med_RMSE_scores)}")
print(f"Med MAPE Average: {np.mean(med_MAPE_scores)} +/- {np.std(med_MAPE_scores)}")
print(f"Med R2 Average: {np.mean(med_R2_scores)*100} +/- {np.std(med_R2_scores)*100}\n")
print(f"\nHigh MAE Average: {np.mean(high_MAE_scores)} +/- {np.std(high_MAE_scores)}")
print(f"High RMSE Average: {np.mean(high_RMSE_scores)} +/- {np.std(high_RMSE_scores)}")
print(f"High MAPE Average: {np.mean(high_MAPE_scores)} +/- {np.std(high_MAPE_scores)}")
print(f"High R2 Average: {np.mean(high_R2_scores)*100} +/- {np.std(high_R2_scores)*100}\n")
print(f"\n\nOverall MAE: {np.mean(MAE_scores)} +/- {np.std(MAE_scores)}")
print(f"Overall RMSE: {np.mean(RMSE_scores)} +/- {np.std(RMSE_scores)}")
print(f"Overall MAPE: {np.mean(MAPE_scores)} +/- {np.std(MAPE_scores)}")
print(f"Overall R2: {np.mean(R2_scores)*100} +/- {np.std(R2_scores)*100}\n\n")
print("Delimited table:")
print("MAPE\tRMSE\tMAE\tR2")
for i in range(0, 10):
print("%.2f/%.2f\t%.2f/%.2f\t%.2f/%.2f\t%.2f/%.2f" %
(MAPE_train[i], MAPE_scores[i], RMSE_train[i], RMSE_scores[i], MAE_train[i], MAE_scores[i], R2_train[i], R2_scores[i]))
print("%.2f/%.2f\t%.2f/%.2f\t%.2f/%.2f\t%.2f/%.2f\t<===Averages" % (np.mean(MAPE_train), np.mean(MAPE_scores),
np.mean(RMSE_train), np.mean(RMSE_scores), np.mean(MAE_train), np.mean(MAE_scores), np.mean(R2_train), np.mean(R2_scores)))
------------------------------------------Fold 1------------------------------------------ Train Loss: 847.4215087890625 Score Loss: 861.412841796875 Train RMSE: 1088.5302734375 Score RMSE: 1106.7781982421875 Train MAPE: 71.63470458984375 Score MAPE: 70.47389221191406 Train R2: -1.3918595275407855 Score R2: -1.4182643499057188 3-VOLUME SCORES: LOW (n=88), MED (n=178), HIGH (n=409), TOTAL IN FOLD (n=675) Low Loss: 64.68569946289062 Med Loss: 286.7408142089844 High Loss: 1282.9378662109375 Low RMSE: 80.41761779785156 Med RMSE: 312.65625 High RMSE: 1406.3056640625 Low MAPE: 52.8454475402832 Med MAPE: 52.10579299926758 High MAPE: 82.2607421875 Low R2: -0.3586961899791614 Med R2: -5.293048877913207 High R2: -4.961104498036262 ------------------------------------------Fold 2------------------------------------------ Train Loss: 387.82086181640625 Score Loss: 401.4930114746094 Train RMSE: 524.1527099609375 Score RMSE: 558.4675903320312 Train MAPE: 60.28167724609375 Score MAPE: 76.18029022216797 Train R2: 0.43153177031235657 Score R2: 0.3385075041123575 3-VOLUME SCORES: LOW (n=94), MED (n=166), HIGH (n=415), TOTAL IN FOLD (n=675) Low Loss: 343.2879333496094 Med Loss: 551.2552490234375 High Loss: 354.7719421386719 Low RMSE: 627.9502563476562 Med RMSE: 725.778564453125 High RMSE: 455.265380859375 Low MAPE: 232.14764404296875 Med MAPE: 113.37293243408203 High MAPE: 25.975683212280273 Low R2: -80.41724628901328 Med R2: -29.829796308811023 High R2: 0.2946782425293091 ------------------------------------------Fold 3------------------------------------------ Train Loss: 389.95843505859375 Score Loss: 385.32061767578125 Train RMSE: 525.5571899414062 Score RMSE: 529.3181762695312 Train MAPE: 62.555030822753906 Score MAPE: 70.7053451538086 Train R2: 0.4493944571627341 Score R2: 0.4751882032875536 3-VOLUME SCORES: LOW (n=100), MED (n=159), HIGH (n=416), TOTAL IN FOLD (n=675) Low Loss: 291.5736083984375 Med Loss: 483.9147033691406 High Loss: 370.17218017578125 Low RMSE: 
552.7460327148438 Med RMSE: 629.4259643554688 High RMSE: 479.3200378417969 Low MAPE: 210.9174346923828 Med MAPE: 100.19063568115234 High MAPE: 25.730892181396484 Low R2: -70.95229633289767 Med R2: -22.3027305526873 High R2: 0.3412928291349705 ------------------------------------------Fold 4------------------------------------------ Train Loss: 849.5169067382812 Score Loss: 842.0427856445312 Train RMSE: 1091.4732666015625 Score RMSE: 1079.8016357421875 Train MAPE: 71.34760284423828 Score MAPE: 73.09644317626953 Train R2: -1.3925850925227823 Score R2: -1.4091826626623676 3-VOLUME SCORES: LOW (n=98), MED (n=164), HIGH (n=413), TOTAL IN FOLD (n=675) Low Loss: 66.10491943359375 Med Loss: 265.1289978027344 High Loss: 1255.252685546875 Low RMSE: 82.78710174560547 Med RMSE: 289.1405029296875 High RMSE: 1367.7789306640625 Low MAPE: 73.29533386230469 Med MAPE: 50.32068634033203 High MAPE: 82.09339141845703 Low R2: -0.4337662754008549 Med R2: -5.281699923885517 High R2: -5.3383130944106485 ------------------------------------------Fold 5------------------------------------------ Train Loss: 388.56365966796875 Score Loss: 374.11614990234375 Train RMSE: 514.2362060546875 Score RMSE: 501.18707275390625 Train MAPE: 58.72897720336914 Score MAPE: 67.2981185913086 Train R2: 0.49014662946250853 Score R2: 0.5134687471680686 3-VOLUME SCORES: LOW (n=107), MED (n=147), HIGH (n=421), TOTAL IN FOLD (n=675) Low Loss: 254.22422790527344 Med Loss: 436.44451904296875 High Loss: 382.82440185546875 Low RMSE: 408.2790832519531 Med RMSE: 590.0972290039062 High RMSE: 488.6566162109375 Low MAPE: 192.737548828125 Med MAPE: 89.2752685546875 High MAPE: 27.743106842041016 Low R2: -36.66681512705369 Med R2: -18.895230975245404 High R2: 0.29920739905917015 ------------------------------------------Fold 6------------------------------------------ Train Loss: 379.8049621582031 Score Loss: 373.32501220703125 Train RMSE: 509.7525329589844 Score RMSE: 493.6255187988281 Train MAPE: 56.25129318237305 Score MAPE: 
62.278385162353516 Train R2: 0.5190513367179426 Score R2: 0.4798717945144255 3-VOLUME SCORES: LOW (n=101), MED (n=164), HIGH (n=410), TOTAL IN FOLD (n=675) Low Loss: 237.19326782226562 Med Loss: 482.69427490234375 High Loss: 363.11224365234375 Low RMSE: 361.96649169921875 Med RMSE: 610.5702514648438 High RMSE: 468.7895812988281 Low MAPE: 151.68212890625 Med MAPE: 96.82891845703125 High MAPE: 26.4343318939209 Low R2: -32.69908900269782 Med R2: -22.259913884065828 High R2: 0.2963982946546637 ------------------------------------------Fold 7------------------------------------------ Train Loss: 395.09130859375 Score Loss: 416.8564453125 Train RMSE: 524.30029296875 Score RMSE: 541.9400634765625 Train MAPE: 63.839534759521484 Score MAPE: 55.8616943359375 Train R2: 0.4526277190374006 Score R2: 0.4136747586734769 3-VOLUME SCORES: LOW (n=87), MED (n=155), HIGH (n=432), TOTAL IN FOLD (n=674) Low Loss: 258.3173828125 Med Loss: 402.2178649902344 High Loss: 454.0368347167969 Low RMSE: 400.96966552734375 Med RMSE: 509.14996337890625 High RMSE: 576.9177856445312 Low MAPE: 150.28335571289062 Med MAPE: 82.35160064697266 High MAPE: 27.341737747192383 Low R2: -35.601640893833924 Med R2: -14.097155530157172 High R2: -0.04074858346140586 ------------------------------------------Fold 8------------------------------------------ Train Loss: 380.4073486328125 Score Loss: 341.8569030761719 Train RMSE: 509.673583984375 Score RMSE: 459.4696044921875 Train MAPE: 59.017364501953125 Score MAPE: 57.22010803222656 Train R2: 0.4978915889756511 Score R2: 0.5537902892203889 3-VOLUME SCORES: LOW (n=111), MED (n=157), HIGH (n=406), TOTAL IN FOLD (n=674) Low Loss: 177.8846435546875 Med Loss: 398.5748596191406 High Loss: 364.7539367675781 Low RMSE: 288.17474365234375 Med RMSE: 520.21630859375 High RMSE: 472.34771728515625 Low MAPE: 145.53465270996094 Med MAPE: 77.7536392211914 High MAPE: 25.13470458984375 Low R2: -16.04157423841104 Med R2: -18.102946205853254 High R2: 0.2958634605406114 
------------------------------------------Fold 9------------------------------------------ Train Loss: 379.12945556640625 Score Loss: 367.4377136230469 Train RMSE: 509.2457580566406 Score RMSE: 490.4013671875 Train MAPE: 58.457977294921875 Score MAPE: 59.55173873901367 Train R2: 0.5301174101334377 Score R2: 0.4965733004925611 3-VOLUME SCORES: LOW (n=109), MED (n=176), HIGH (n=389), TOTAL IN FOLD (n=674) Low Loss: 240.695556640625 Med Loss: 383.6701965332031 High Loss: 395.60736083984375 Low RMSE: 421.9418640136719 Med RMSE: 521.8475952148438 High RMSE: 493.55126953125 Low MAPE: 141.15814208984375 Med MAPE: 81.27538299560547 High MAPE: 26.85647964477539 Low R2: -49.16777652671263 Med R2: -15.482705864640913 High R2: 0.19408563134697332 ------------------------------------------Fold 10------------------------------------------ Train Loss: 382.2039794921875 Score Loss: 409.3859558105469 Train RMSE: 512.3046264648438 Score RMSE: 539.3506469726562 Train MAPE: 61.82522201538086 Score MAPE: 52.500389099121094 Train R2: 0.46986568452579425 Score R2: 0.43049116423935174 3-VOLUME SCORES: LOW (n=79), MED (n=160), HIGH (n=435), TOTAL IN FOLD (n=674) Low Loss: 272.7973327636719 Med Loss: 366.505126953125 High Loss: 449.96392822265625 Low RMSE: 412.7549133300781 Med RMSE: 467.08221435546875 High RMSE: 582.7018432617188 Low MAPE: 146.60540771484375 Med MAPE: 75.84893798828125 High MAPE: 26.82209587097168 Low R2: -49.25612206111523 Med R2: -13.299456346326918 High R2: -0.06665523598682999 Low MAE Average: 220.67645721435548 +/- 87.20610269866046 Low RMSE Average: 363.7987770080566 +/- 167.21523813426208 Low MAPE Average: 149.72070960998536 +/- 52.64983482737128 Low R2 Average: -3715.9502293711525 +/- 2544.665609706329 Med MAE Average: 405.71466064453125 +/- 83.76941551509017 Med RMSE Average: 517.596484375 +/- 129.0684687038835 Med MAPE Average: 81.93237953186035 +/- 18.819402510635054 Med R2 Average: -1648.4468446958651 +/- 722.0345931874194 High MAE Average: 567.3433380126953 
+/- 352.4581452636806 High RMSE Average: 679.1634826660156 +/- 356.4447333140483 High MAPE Average: 37.63931655883789 +/- 22.280622172769995 High R2 Average: -86.85295554629448 +/- 214.65951996812828 Overall MAE: 477.3247436523437 +/- 188.394499721673 Overall RMSE: 630.0339874267578 +/- 233.3717392048823 Overall MAPE: 64.5166404724121 +/- 7.7030973365700115 Overall R2: 8.741187491400975 +/- 75.26551409658562 Delimited table: MAPE RMSE MAE R2 71.63/70.47 1088.53/1106.78 847.42/861.41 -1.39/-1.42 60.28/76.18 524.15/558.47 387.82/401.49 0.43/0.34 62.56/70.71 525.56/529.32 389.96/385.32 0.45/0.48 71.35/73.10 1091.47/1079.80 849.52/842.04 -1.39/-1.41 58.73/67.30 514.24/501.19 388.56/374.12 0.49/0.51 56.25/62.28 509.75/493.63 379.80/373.33 0.52/0.48 63.84/55.86 524.30/541.94 395.09/416.86 0.45/0.41 59.02/57.22 509.67/459.47 380.41/341.86 0.50/0.55 58.46/59.55 509.25/490.40 379.13/367.44 0.53/0.50 61.83/52.50 512.30/539.35 382.20/409.39 0.47/0.43 62.39/64.52 630.92/630.03 477.99/477.32 0.11/0.09 <===Averages Wall time: 2min 30s
# Scale every feature into [0, 1] with MinMax scaling.
minmax = MinMaxScaler()
minmax_X = minmax.fit_transform(X)
print(f"minmax_X.shape: {minmax_X.shape}")
# 80/20 train/test split, seeded for reproducibility.
MM_X_train, MM_X_test, MM_Y_train, MM_Y_test = train_test_split(minmax_X, Y, test_size=0.2, random_state=42)
# convert np arrays to tensor, with float.
MM_X_train = torch.from_numpy(MM_X_train).float()
MM_X_test = torch.from_numpy(MM_X_test).float()
# Reshape the targets into column vectors. (-1, 1) lets torch infer the row
# count, so this no longer breaks if the dataset size differs from the
# previously hard-coded 5396/1350 split sizes.
MM_Y_train = torch.reshape(torch.from_numpy(MM_Y_train).float(), (-1, 1))
MM_Y_test = torch.reshape(torch.from_numpy(MM_Y_test).float(), (-1, 1))
print(f"MM_X_train.shape={MM_X_train.shape} MM_X_test.shape={MM_X_test.shape}\nMM_Y_train.shape={MM_Y_train.shape} MM_Y_test.shape={MM_Y_test.shape}")
minmax_X.shape: (6746, 45) MM_X_train.shape=torch.Size([5396, 45]) MM_X_test.shape=torch.Size([1350, 45]) MM_Y_train.shape=torch.Size([5396, 1]) MM_Y_test.shape=torch.Size([1350, 1])
# Reproducibility: fix every RNG the pipeline touches to one shared seed.
SEED = 42
minmax_study_name = "minmax_45_vars_half_layer_each_time"
torch.manual_seed(SEED)
torch.cuda.manual_seed(SEED)
np.random.seed(SEED)
random.seed(SEED)
# Number of training epochs per Optuna trial.
n_epochs = 100
# src: https://stackoverflow.com/questions/45113245/how-to-get-mini-batches-in-pytorch-in-a-clean-and-efficient-way
def objective_fn(trial):
    """Optuna objective: build, train, and evaluate a DNN on the MinMax-scaled data.

    Searches over learning rate, batch size, layer count, first-layer width,
    and per-layer activation. Trains for `n_epochs` on the module-level
    MM_X_train/MM_Y_train tensors, evaluating on MM_X_test/MM_Y_test each
    epoch for pruning. Returns the final test-set MAE, which Optuna minimizes.

    Raises:
        optuna.TrialPruned: when the pruner decides the trial is unpromising.
    """
    # Use the GPU when one is available.
    device = "cuda:0" if torch.cuda.is_available() else "cpu"
    # Hyperparameters sampled by Optuna (names kept stable across studies).
    lr = trial.suggest_float("lr", 1e-3, 1e-1, log=True)
    batch_size = trial.suggest_categorical("batch_size", [16, 32, 64, 128])
    n_layers = trial.suggest_int('n_layers', 2, 5)
    layers = []
    in_features = 45  # number of input variables after scaling
    out_features = 0
    # Width of the first hidden layer; each following layer is half as wide.
    max_nrns = trial.suggest_int("neurons_HL1", 2, 1024, step=2)
    for i in range(n_layers):
        out_features = int(max_nrns)
        layers.append(torch.nn.Linear(in_features, out_features))
        activation = trial.suggest_categorical(f"HL{i}_ac_fn", ["relu", "linear"])
        # A 'linear' activation is the identity, i.e. no activation module.
        if activation == "relu":
            layers.append(torch.nn.ReLU())
        in_features = out_features
        # Halve the width for the next layer, but never below 2 so the last
        # hidden layer can never become Linear(0, 1).
        if max_nrns > 2:
            max_nrns = max_nrns / 2
    # Output layer: single regression target, no activation.
    layers.append(torch.nn.Linear(out_features, 1))
    dnn_model = torch.nn.Sequential(*layers).to(device)
    # Use MAE as loss function (PyTorch calls it L1Loss).
    loss_fn = nn.L1Loss()
    optimizer = optim.Adam(dnn_model.parameters(), lr=lr)
    test_loss = None
    for epoch in range(n_epochs):
        # Training pass over shuffled mini-batches.
        # src: https://stackoverflow.com/questions/45113245/how-to-get-mini-batches-in-pytorch-in-a-clean-and-efficient-way
        dnn_model.train()
        permutation = torch.randperm(MM_X_train.size()[0])
        for i in range(0, MM_X_train.size()[0], batch_size):
            indices = permutation[i:i + batch_size]
            X_train_batch, Y_train_batch = MM_X_train[indices], MM_Y_train[indices]
            train_prediction = dnn_model(X_train_batch.to(device))
            train_loss = loss_fn(train_prediction, Y_train_batch.to(device))
            optimizer.zero_grad()
            # backpropagation
            train_loss.backward()
            optimizer.step()
        # Evaluate on the held-out test set. no_grad() stops autograd from
        # building a graph for the evaluation forward pass (the original
        # retained that graph needlessly every epoch).
        dnn_model.eval()
        with torch.no_grad():
            test_prediction = dnn_model(MM_X_test.to(device))
            test_loss = loss_fn(test_prediction, MM_Y_test.to(device))
        # Report a plain float — Optuna storages/pruners expect a number,
        # not a tensor — and prune unpromising trials early.
        trial.report(test_loss.item(), step=epoch)
        if trial.should_prune():
            raise optuna.TrialPruned()
    # Return the quantity being minimized: the final-epoch test MAE.
    return test_loss.item()
%%time
# Create a TPE-sampled study (sampler seeded for reproducibility) and minimize
# the test-set MAE returned by objective_fn over up to 5000 trials.
# NOTE(review): the study lives in memory only — results are lost when the
# kernel restarts unless a storage backend is added.
scaled_study = optuna.create_study(sampler=optuna.samplers.TPESampler(seed=42),study_name=minmax_study_name, direction='minimize')
scaled_study.optimize(objective_fn, n_trials=5000)
[I 2021-05-10 17:53:50,558] A new study created in memory with name: minmax_45_vars_half_layer_each_time
[I 2021-05-10 17:54:27,438] Trial 0 finished with value: 175.7174072265625 and parameters: {'lr': 0.005611516415334507, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 60, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu'}. Best is trial 0 with value: 175.7174072265625.
[I 2021-05-10 17:55:11,704] Trial 1 finished with value: 184.4034423828125 and parameters: {'lr': 0.08706020878304858, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 538, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear'}. Best is trial 0 with value: 175.7174072265625.
[I 2021-05-10 17:55:47,612] Trial 2 finished with value: 165.81497192382812 and parameters: {'lr': 0.008168455894760165, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 624, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 2 with value: 165.81497192382812.
[I 2021-05-10 17:55:56,525] Trial 3 finished with value: 239.18939208984375 and parameters: {'lr': 0.041380401125610165, 'batch_size': 64, 'n_layers': 2, 'neurons_HL1': 508, 'HL0_ac_fn': 'linear', 'HL1_ac_fn': 'linear'}. Best is trial 2 with value: 165.81497192382812.
[I 2021-05-10 17:56:04,967] Trial 4 finished with value: 161.50067138671875 and parameters: {'lr': 0.004201672054372531, 'batch_size': 128, 'n_layers': 5, 'neurons_HL1': 964, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 4 with value: 161.50067138671875.
[I 2021-05-10 17:56:05,034] Trial 5 pruned.
[I 2021-05-10 17:56:05,438] Trial 6 pruned.
[I 2021-05-10 17:56:05,546] Trial 7 pruned.
[I 2021-05-10 17:56:10,134] Trial 8 pruned.
[I 2021-05-10 17:57:08,331] Trial 9 finished with value: 163.53443908691406 and parameters: {'lr': 0.0021010799310103557, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 192, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 4 with value: 161.50067138671875.
[I 2021-05-10 17:57:08,425] Trial 10 pruned.
[I 2021-05-10 17:57:08,509] Trial 11 pruned.
[I 2021-05-10 17:57:34,023] Trial 12 finished with value: 164.40794372558594 and parameters: {'lr': 0.0022012806099921192, 'batch_size': 32, 'n_layers': 4, 'neurons_HL1': 272, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 4 with value: 161.50067138671875.
[I 2021-05-10 17:57:34,108] Trial 13 pruned.
[I 2021-05-10 17:57:34,184] Trial 14 pruned.
[I 2021-05-10 17:57:35,359] Trial 15 pruned.
[I 2021-05-10 17:57:37,325] Trial 16 pruned.
[I 2021-05-10 17:57:37,463] Trial 17 pruned.
[I 2021-05-10 17:57:37,547] Trial 18 pruned.
[I 2021-05-10 17:57:38,097] Trial 19 pruned.
[I 2021-05-10 17:57:38,180] Trial 20 pruned.
[I 2021-05-10 17:57:40,495] Trial 21 pruned.
[I 2021-05-10 17:57:40,722] Trial 22 pruned.
[I 2021-05-10 17:58:10,098] Trial 23 finished with value: 163.5230712890625 and parameters: {'lr': 0.0016893825403529064, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 398, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'relu'}. Best is trial 4 with value: 161.50067138671875.
[I 2021-05-10 17:58:10,402] Trial 24 pruned.
[I 2021-05-10 17:58:11,586] Trial 25 pruned.
[I 2021-05-10 17:58:19,224] Trial 26 pruned.
[I 2021-05-10 17:58:19,794] Trial 27 pruned.
[I 2021-05-10 17:58:19,871] Trial 28 pruned.
[I 2021-05-10 17:58:49,081] Trial 29 finished with value: 164.23138427734375 and parameters: {'lr': 0.005665317770290171, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 670, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 4 with value: 161.50067138671875.
[I 2021-05-10 17:58:49,239] Trial 30 pruned.
[I 2021-05-10 17:58:49,544] Trial 31 pruned.
[I 2021-05-10 17:59:19,036] Trial 32 finished with value: 182.00759887695312 and parameters: {'lr': 0.004083883382311936, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 842, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 4 with value: 161.50067138671875.
[I 2021-05-10 17:59:19,343] Trial 33 pruned.
[I 2021-05-10 18:00:21,786] Trial 34 finished with value: 166.13629150390625 and parameters: {'lr': 0.006927581511478829, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 940, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 4 with value: 161.50067138671875.
[I 2021-05-10 18:00:24,942] Trial 35 pruned.
[I 2021-05-10 18:01:25,809] Trial 36 finished with value: 161.9620819091797 and parameters: {'lr': 0.0027353116452462065, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 648, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 4 with value: 161.50067138671875.
[I 2021-05-10 18:01:27,021] Trial 37 pruned.
[I 2021-05-10 18:01:27,562] Trial 38 pruned.
[I 2021-05-10 18:01:28,163] Trial 39 pruned.
[I 2021-05-10 18:02:20,985] Trial 40 finished with value: 170.76358032226562 and parameters: {'lr': 0.0013722098607284643, 'batch_size': 16, 'n_layers': 4, 'neurons_HL1': 454, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 4 with value: 161.50067138671875.
[I 2021-05-10 18:02:22,209] Trial 41 pruned.
[I 2021-05-10 18:02:52,126] Trial 42 finished with value: 173.59832763671875 and parameters: {'lr': 0.0035316211428223537, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 502, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 4 with value: 161.50067138671875.
[I 2021-05-10 18:02:52,217] Trial 43 pruned.
[I 2021-05-10 18:02:52,405] Trial 44 pruned.
[I 2021-05-10 18:02:53,011] Trial 45 pruned.
[I 2021-05-10 18:02:53,315] Trial 46 pruned.
[I 2021-05-10 18:02:53,405] Trial 47 pruned.
[I 2021-05-10 18:02:53,990] Trial 48 pruned.
[I 2021-05-10 18:02:54,070] Trial 49 pruned.
[I 2021-05-10 18:02:54,661] Trial 50 pruned.
[I 2021-05-10 18:02:54,926] Trial 51 pruned.
[I 2021-05-10 18:02:55,154] Trial 52 pruned.
[I 2021-05-10 18:02:55,380] Trial 53 pruned.
[I 2021-05-10 18:02:55,647] Trial 54 pruned.
[I 2021-05-10 18:02:55,803] Trial 55 pruned.
[I 2021-05-10 18:02:56,102] Trial 56 pruned.
[I 2021-05-10 18:02:56,157] Trial 57 pruned.
[I 2021-05-10 18:02:56,759] Trial 58 pruned.
[I 2021-05-10 18:02:57,029] Trial 59 pruned.
[I 2021-05-10 18:02:57,129] Trial 60 pruned.
[I 2021-05-10 18:03:00,980] Trial 61 pruned.
[I 2021-05-10 18:03:05,898] Trial 62 pruned.
[I 2021-05-10 18:03:06,779] Trial 63 pruned.
[I 2021-05-10 18:03:07,138] Trial 64 pruned.
[I 2021-05-10 18:03:07,703] Trial 65 pruned.
[I 2021-05-10 18:03:08,014] Trial 66 pruned.
[I 2021-05-10 18:03:08,615] Trial 67 pruned.
[I 2021-05-10 18:03:08,880] Trial 68 pruned.
[I 2021-05-10 18:03:09,043] Trial 69 pruned.
[I 2021-05-10 18:03:09,134] Trial 70 pruned.
[I 2021-05-10 18:03:09,732] Trial 71 pruned.
[I 2021-05-10 18:03:10,337] Trial 72 pruned.
[I 2021-05-10 18:03:13,872] Trial 73 pruned.
[I 2021-05-10 18:03:14,477] Trial 74 pruned.
[I 2021-05-10 18:03:15,076] Trial 75 pruned.
[I 2021-05-10 18:03:20,227] Trial 76 pruned.
[I 2021-05-10 18:03:20,827] Trial 77 pruned.
[I 2021-05-10 18:03:21,415] Trial 78 pruned.
[I 2021-05-10 18:03:21,725] Trial 79 pruned.
[I 2021-05-10 18:03:21,805] Trial 80 pruned.
[I 2021-05-10 18:03:22,820] Trial 81 pruned.
[I 2021-05-10 18:03:40,449] Trial 82 pruned.
[I 2021-05-10 18:03:41,473] Trial 83 pruned.
[I 2021-05-10 18:03:42,340] Trial 84 pruned.
[I 2021-05-10 18:03:42,922] Trial 85 pruned.
[I 2021-05-10 18:04:08,090] Trial 86 finished with value: 163.9869842529297 and parameters: {'lr': 0.0018615789244781668, 'batch_size': 32, 'n_layers': 4, 'neurons_HL1': 430, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 4 with value: 161.50067138671875.
[I 2021-05-10 18:04:09,741] Trial 87 pruned.
[I 2021-05-10 18:04:38,390] Trial 88 finished with value: 167.7534942626953 and parameters: {'lr': 0.004346763795718522, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 408, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 4 with value: 161.50067138671875.
[I 2021-05-10 18:04:48,448] Trial 89 pruned.
[I 2021-05-10 18:04:48,750] Trial 90 pruned.
[I 2021-05-10 18:05:18,203] Trial 91 finished with value: 165.7915802001953 and parameters: {'lr': 0.00595425421645083, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 426, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 4 with value: 161.50067138671875.
[I 2021-05-10 18:05:18,502] Trial 92 pruned.
[I 2021-05-10 18:05:18,803] Trial 93 pruned.
[I 2021-05-10 18:05:19,101] Trial 94 pruned.
[I 2021-05-10 18:05:43,697] Trial 95 pruned.
[I 2021-05-10 18:05:43,949] Trial 96 pruned.
[I 2021-05-10 18:05:46,231] Trial 97 pruned.
[I 2021-05-10 18:05:46,326] Trial 98 pruned.
[I 2021-05-10 18:05:46,473] Trial 99 pruned.
[I 2021-05-10 18:05:46,776] Trial 100 pruned.
[I 2021-05-10 18:05:47,075] Trial 101 pruned.
[I 2021-05-10 18:05:47,369] Trial 102 pruned.
[I 2021-05-10 18:06:16,800] Trial 103 finished with value: 171.1871337890625 and parameters: {'lr': 0.004400548771571138, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 444, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 4 with value: 161.50067138671875.
[I 2021-05-10 18:06:17,113] Trial 104 pruned.
[I 2021-05-10 18:06:17,420] Trial 105 pruned.
[I 2021-05-10 18:06:17,618] Trial 106 pruned.
[I 2021-05-10 18:06:17,934] Trial 107 pruned.
[I 2021-05-10 18:06:18,028] Trial 108 pruned.
[I 2021-05-10 18:06:22,043] Trial 109 pruned.
[I 2021-05-10 18:06:23,245] Trial 110 pruned.
[I 2021-05-10 18:06:23,784] Trial 111 pruned.
[I 2021-05-10 18:07:17,225] Trial 112 finished with value: 163.2682342529297 and parameters: {'lr': 0.0014878221633072997, 'batch_size': 16, 'n_layers': 4, 'neurons_HL1': 656, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 4 with value: 161.50067138671875.
[I 2021-05-10 18:07:18,310] Trial 113 pruned.
[I 2021-05-10 18:07:18,856] Trial 114 pruned.
[I 2021-05-10 18:08:11,198] Trial 115 finished with value: 162.2703857421875 and parameters: {'lr': 0.0020535207484778384, 'batch_size': 16, 'n_layers': 4, 'neurons_HL1': 712, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 4 with value: 161.50067138671875.
[I 2021-05-10 18:08:11,746] Trial 116 pruned.
[I 2021-05-10 18:08:12,293] Trial 117 pruned.
[I 2021-05-10 18:08:12,851] Trial 118 pruned.
[I 2021-05-10 18:08:13,393] Trial 119 pruned.
[I 2021-05-10 18:08:13,923] Trial 120 pruned.
[I 2021-05-10 18:08:14,471] Trial 121 pruned.
[I 2021-05-10 18:08:45,243] Trial 122 finished with value: 164.31057739257812 and parameters: {'lr': 0.004297570730414174, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 960, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 4 with value: 161.50067138671875.
[I 2021-05-10 18:08:45,339] Trial 123 pruned.
[I 2021-05-10 18:08:45,834] Trial 124 pruned.
[I 2021-05-10 18:08:46,110] Trial 125 pruned.
[I 2021-05-10 18:08:46,279] Trial 126 pruned.
[I 2021-05-10 18:08:49,792] Trial 127 pruned.
[I 2021-05-10 18:08:50,054] Trial 128 pruned.
[I 2021-05-10 18:08:50,636] Trial 129 pruned.
[I 2021-05-10 18:08:50,946] Trial 130 pruned.
[I 2021-05-10 18:08:56,740] Trial 131 pruned.
[I 2021-05-10 18:08:57,060] Trial 132 pruned.
[I 2021-05-10 18:09:27,902] Trial 133 finished with value: 159.60462951660156 and parameters: {'lr': 0.002908294235507702, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 708, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 133 with value: 159.60462951660156.
[I 2021-05-10 18:09:28,223] Trial 134 pruned.
[I 2021-05-10 18:09:59,171] Trial 135 finished with value: 176.76901245117188 and parameters: {'lr': 0.003670224516104773, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 798, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 133 with value: 159.60462951660156.
[I 2021-05-10 18:09:59,498] Trial 136 pruned.
[I 2021-05-10 18:09:59,597] Trial 137 pruned.
[I 2021-05-10 18:10:00,198] Trial 138 pruned.
[I 2021-05-10 18:10:00,473] Trial 139 pruned.
[I 2021-05-10 18:10:01,081] Trial 140 pruned.
[I 2021-05-10 18:10:30,735] Trial 141 finished with value: 165.85134887695312 and parameters: {'lr': 0.004057493716963243, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 424, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 133 with value: 159.60462951660156.
[I 2021-05-10 18:10:31,042] Trial 142 pruned.
[I 2021-05-10 18:10:31,673] Trial 143 pruned.
[I 2021-05-10 18:10:31,989] Trial 144 pruned.
[I 2021-05-10 18:10:32,302] Trial 145 pruned.
[I 2021-05-10 18:10:34,152] Trial 146 pruned.
[I 2021-05-10 18:11:04,564] Trial 147 finished with value: 173.633056640625 and parameters: {'lr': 0.0031375145613642096, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 576, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 133 with value: 159.60462951660156.
[I 2021-05-10 18:11:05,728] Trial 148 pruned.
[I 2021-05-10 18:11:06,033] Trial 149 pruned.
[I 2021-05-10 18:11:06,415] Trial 150 pruned.
[I 2021-05-10 18:11:35,936] Trial 151 finished with value: 172.05319213867188 and parameters: {'lr': 0.004256609094055981, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 408, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 133 with value: 159.60462951660156.
[I 2021-05-10 18:11:36,258] Trial 152 pruned.
[I 2021-05-10 18:11:36,568] Trial 153 pruned.
[I 2021-05-10 18:11:36,880] Trial 154 pruned.
[I 2021-05-10 18:11:37,196] Trial 155 pruned.
[I 2021-05-10 18:11:37,289] Trial 156 pruned.
[I 2021-05-10 18:11:37,526] Trial 157 pruned.
[I 2021-05-10 18:11:38,065] Trial 158 pruned.
[I 2021-05-10 18:11:38,230] Trial 159 pruned.
[I 2021-05-10 18:11:38,561] Trial 160 pruned.
[I 2021-05-10 18:11:39,102] Trial 161 pruned.
[I 2021-05-10 18:12:31,345] Trial 162 finished with value: 166.5028839111328 and parameters: {'lr': 0.0019280103529492948, 'batch_size': 16, 'n_layers': 4, 'neurons_HL1': 442, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 133 with value: 159.60462951660156.
[I 2021-05-10 18:12:31,860] Trial 163 pruned.
[I 2021-05-10 18:12:32,384] Trial 164 pruned.
[I 2021-05-10 18:12:32,911] Trial 165 pruned.
[I 2021-05-10 18:12:33,451] Trial 166 pruned.
[I 2021-05-10 18:12:33,967] Trial 167 pruned.
[I 2021-05-10 18:12:34,271] Trial 168 pruned.
[I 2021-05-10 18:12:34,571] Trial 169 pruned.
[I 2021-05-10 18:13:25,964] Trial 170 finished with value: 169.0635223388672 and parameters: {'lr': 0.001958621466118359, 'batch_size': 16, 'n_layers': 4, 'neurons_HL1': 416, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 133 with value: 159.60462951660156.
[I 2021-05-10 18:13:32,664] Trial 171 pruned.
[I 2021-05-10 18:13:33,705] Trial 172 pruned.
[I 2021-05-10 18:13:34,241] Trial 173 pruned.
[I 2021-05-10 18:14:25,695] Trial 174 finished with value: 163.40211486816406 and parameters: {'lr': 0.001997503891842458, 'batch_size': 16, 'n_layers': 4, 'neurons_HL1': 364, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 133 with value: 159.60462951660156.
[I 2021-05-10 18:14:26,221] Trial 175 pruned.
[I 2021-05-10 18:14:26,316] Trial 176 pruned.
[I 2021-05-10 18:14:26,587] Trial 177 pruned.
[I 2021-05-10 18:14:27,226] Trial 178 pruned.
[I 2021-05-10 18:14:27,544] Trial 179 pruned.
[I 2021-05-10 18:14:28,073] Trial 180 pruned.
[I 2021-05-10 18:14:29,114] Trial 181 pruned.
[I 2021-05-10 18:14:29,645] Trial 182 pruned.
[I 2021-05-10 18:14:30,179] Trial 183 pruned.
[I 2021-05-10 18:14:31,222] Trial 184 pruned.
[I 2021-05-10 18:14:31,743] Trial 185 pruned.
[I 2021-05-10 18:14:32,048] Trial 186 pruned.
[I 2021-05-10 18:14:34,159] Trial 187 pruned.
[I 2021-05-10 18:14:34,475] Trial 188 pruned.
[I 2021-05-10 18:14:35,079] Trial 189 pruned.
[I 2021-05-10 18:14:35,363] Trial 190 pruned.
[I 2021-05-10 18:14:36,436] Trial 191 pruned.
[I 2021-05-10 18:14:36,978] Trial 192 pruned.
[I 2021-05-10 18:14:37,524] Trial 193 pruned.
[I 2021-05-10 18:14:38,575] Trial 194 pruned.
[I 2021-05-10 18:14:39,191] Trial 195 pruned.
[I 2021-05-10 18:14:39,281] Trial 196 pruned.
[I 2021-05-10 18:14:39,563] Trial 197 pruned.
[I 2021-05-10 18:14:40,171] Trial 198 pruned.
[I 2021-05-10 18:14:40,483] Trial 199 pruned.
[I 2021-05-10 18:14:48,152] Trial 200 pruned.
[I 2021-05-10 18:14:48,459] Trial 201 pruned.
[I 2021-05-10 18:14:48,769] Trial 202 pruned.
[I 2021-05-10 18:14:49,073] Trial 203 pruned.
[I 2021-05-10 18:15:18,678] Trial 204 finished with value: 164.1596221923828 and parameters: {'lr': 0.005086973309547956, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 404, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 133 with value: 159.60462951660156.
[I 2021-05-10 18:15:18,988] Trial 205 pruned.
[I 2021-05-10 18:15:19,295] Trial 206 pruned.
[I 2021-05-10 18:15:19,571] Trial 207 pruned.
[I 2021-05-10 18:15:20,018] Trial 208 pruned.
[I 2021-05-10 18:15:20,184] Trial 209 pruned.
[I 2021-05-10 18:15:20,493] Trial 210 pruned.
[I 2021-05-10 18:15:20,803] Trial 211 pruned.
[I 2021-05-10 18:15:21,110] Trial 212 pruned.
[I 2021-05-10 18:15:21,422] Trial 213 pruned.
[I 2021-05-10 18:15:21,756] Trial 214 pruned.
[I 2021-05-10 18:15:22,084] Trial 215 pruned.
[I 2021-05-10 18:15:22,718] Trial 216 pruned.
[I 2021-05-10 18:15:23,009] Trial 217 pruned.
[I 2021-05-10 18:15:23,634] Trial 218 pruned.
[I 2021-05-10 18:15:23,730] Trial 219 pruned.
[I 2021-05-10 18:15:24,007] Trial 220 pruned.
[I 2021-05-10 18:15:24,318] Trial 221 pruned.
[I 2021-05-10 18:15:24,626] Trial 222 pruned.
[I 2021-05-10 18:15:24,935] Trial 223 pruned.
[I 2021-05-10 18:15:25,244] Trial 224 pruned.
[I 2021-05-10 18:15:25,856] Trial 225 pruned.
[I 2021-05-10 18:15:26,467] Trial 226 pruned.
[I 2021-05-10 18:15:26,840] Trial 227 pruned.
[I 2021-05-10 18:15:27,118] Trial 228 pruned.
[I 2021-05-10 18:15:27,726] Trial 229 pruned.
[I 2021-05-10 18:15:28,045] Trial 230 pruned.
[I 2021-05-10 18:15:28,357] Trial 231 pruned.
[I 2021-05-10 18:15:28,671] Trial 232 pruned.
[I 2021-05-10 18:15:28,987] Trial 233 pruned.
[I 2021-05-10 18:15:29,300] Trial 234 pruned.
[I 2021-05-10 18:15:29,612] Trial 235 pruned.
[I 2021-05-10 18:15:30,150] Trial 236 pruned.
[I 2021-05-10 18:16:00,032] Trial 237 finished with value: 168.26797485351562 and parameters: {'lr': 0.0026706188715931284, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 516, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 133 with value: 159.60462951660156.
[I 2021-05-10 18:16:00,341] Trial 238 pruned.
[I 2021-05-10 18:16:00,951] Trial 239 pruned.
[I 2021-05-10 18:16:01,239] Trial 240 pruned.
[I 2021-05-10 18:16:01,557] Trial 241 pruned.
[I 2021-05-10 18:16:01,872] Trial 242 pruned.
[I 2021-05-10 18:16:02,490] Trial 243 pruned.
[I 2021-05-10 18:16:02,811] Trial 244 pruned.
[I 2021-05-10 18:17:02,687] Trial 245 finished with value: 165.1490478515625 and parameters: {'lr': 0.00415082183920592, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 610, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'relu'}. Best is trial 133 with value: 159.60462951660156.
[I 2021-05-10 18:17:03,285] Trial 246 pruned.
[I 2021-05-10 18:17:10,282] Trial 247 pruned.
[I 2021-05-10 18:17:10,880] Trial 248 pruned.
[I 2021-05-10 18:17:11,468] Trial 249 pruned.
[I 2021-05-10 18:17:12,062] Trial 250 pruned.
[I 2021-05-10 18:17:12,153] Trial 251 pruned.
[I 2021-05-10 18:17:12,707] Trial 252 pruned.
[I 2021-05-10 18:17:13,303] Trial 253 pruned.
[I 2021-05-10 18:17:13,813] Trial 254 pruned.
[I 2021-05-10 18:17:13,941] Trial 255 pruned.
[I 2021-05-10 18:17:14,518] Trial 256 pruned.
[I 2021-05-10 18:17:14,820] Trial 257 pruned.
[I 2021-05-10 18:17:15,333] Trial 258 pruned.
[I 2021-05-10 18:17:15,640] Trial 259 pruned.
[I 2021-05-10 18:17:16,170] Trial 260 pruned.
[I 2021-05-10 18:17:16,462] Trial 261 pruned.
[I 2021-05-10 18:17:16,560] Trial 262 pruned.
[I 2021-05-10 18:17:16,870] Trial 263 pruned.
[I 2021-05-10 18:17:17,456] Trial 264 pruned.
[I 2021-05-10 18:17:19,241] Trial 265 pruned.
[I 2021-05-10 18:17:19,837] Trial 266 pruned.
[I 2021-05-10 18:17:20,140] Trial 267 pruned.
[I 2021-05-10 18:17:20,662] Trial 268 pruned.
[I 2021-05-10 18:17:20,971] Trial 269 pruned.
[I 2021-05-10 18:17:21,468] Trial 270 pruned.
[I 2021-05-10 18:17:21,771] Trial 271 pruned.
[I 2021-05-10 18:17:22,078] Trial 272 pruned.
[I 2021-05-10 18:17:22,661] Trial 273 pruned.
[I 2021-05-10 18:17:49,740] Trial 274 finished with value: 167.06927490234375 and parameters: {'lr': 0.005018879729605584, 'batch_size': 32, 'n_layers': 4, 'neurons_HL1': 924, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 133 with value: 159.60462951660156.
[I 2021-05-10 18:17:50,310] Trial 275 pruned.
[I 2021-05-10 18:17:50,598] Trial 276 pruned.
[I 2021-05-10 18:17:51,164] Trial 277 pruned.
[I 2021-05-10 18:17:51,261] Trial 278 pruned.
[I 2021-05-10 18:17:51,546] Trial 279 pruned.
[I 2021-05-10 18:17:51,820] Trial 280 pruned.
[I 2021-05-10 18:17:52,898] Trial 281 pruned.
[I 2021-05-10 18:17:53,193] Trial 282 pruned.
[I 2021-05-10 18:18:47,088] Trial 283 finished with value: 170.37750244140625 and parameters: {'lr': 0.002903242495702699, 'batch_size': 16, 'n_layers': 4, 'neurons_HL1': 738, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 133 with value: 159.60462951660156.
[I 2021-05-10 18:19:39,654] Trial 284 finished with value: 164.64356994628906 and parameters: {'lr': 0.0029199461510492444, 'batch_size': 16, 'n_layers': 4, 'neurons_HL1': 758, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 133 with value: 159.60462951660156.
[I 2021-05-10 18:19:40,201] Trial 285 pruned.
[I 2021-05-10 18:19:40,758] Trial 286 pruned.
[I 2021-05-10 18:19:41,308] Trial 287 pruned.
[I 2021-05-10 18:19:41,860] Trial 288 pruned.
[I 2021-05-10 18:20:35,529] Trial 289 finished with value: 164.01004028320312 and parameters: {'lr': 0.0024546504045553827, 'batch_size': 16, 'n_layers': 4, 'neurons_HL1': 766, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 133 with value: 159.60462951660156.
[I 2021-05-10 18:20:36,079] Trial 290 pruned.
[I 2021-05-10 18:20:36,578] Trial 291 pruned.
[I 2021-05-10 18:20:37,137] Trial 292 pruned.
[I 2021-05-10 18:20:37,694] Trial 293 pruned.
[I 2021-05-10 18:20:37,849] Trial 294 pruned.
[I 2021-05-10 18:20:37,950] Trial 295 pruned.
[I 2021-05-10 18:20:38,514] Trial 296 pruned.
[I 2021-05-10 18:20:39,141] Trial 297 pruned.
[I 2021-05-10 18:20:39,429] Trial 298 pruned.
[I 2021-05-10 18:20:40,061] Trial 299 pruned.
[I 2021-05-10 18:21:06,825] Trial 300 finished with value: 160.1767578125 and parameters: {'lr': 0.0024229599681551, 'batch_size': 32, 'n_layers': 4, 'neurons_HL1': 704, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 133 with value: 159.60462951660156.
[I 2021-05-10 18:21:07,158] Trial 301 pruned.
[I 2021-05-10 18:21:07,365] Trial 302 pruned.
[I 2021-05-10 18:21:07,687] Trial 303 pruned.
[I 2021-05-10 18:21:07,982] Trial 304 pruned.
[I 2021-05-10 18:21:08,301] Trial 305 pruned.
[I 2021-05-10 18:21:08,631] Trial 306 pruned.
[I 2021-05-10 18:21:35,436] Trial 307 finished with value: 168.64437866210938 and parameters: {'lr': 0.0026383155298790933, 'batch_size': 32, 'n_layers': 4, 'neurons_HL1': 686, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 133 with value: 159.60462951660156.
[I 2021-05-10 18:21:35,777] Trial 308 pruned.
[I 2021-05-10 18:21:36,101] Trial 309 pruned.
[I 2021-05-10 18:21:36,386] Trial 310 pruned.
[I 2021-05-10 18:21:36,488] Trial 311 pruned.
[I 2021-05-10 18:21:36,776] Trial 312 pruned.
[I 2021-05-10 18:21:37,098] Trial 313 pruned.
[I 2021-05-10 18:21:37,418] Trial 314 pruned.
[I 2021-05-10 18:21:37,978] Trial 315 pruned.
[I 2021-05-10 18:21:38,314] Trial 316 pruned.
[I 2021-05-10 18:21:38,789] Trial 317 pruned.
[I 2021-05-10 18:21:39,071] Trial 318 pruned.
[I 2021-05-10 18:21:39,250] Trial 319 pruned.
[I 2021-05-10 18:21:40,485] Trial 320 pruned.
[I 2021-05-10 18:21:40,780] Trial 321 pruned.
[I 2021-05-10 18:21:41,095] Trial 322 pruned.
[I 2021-05-10 18:21:41,643] Trial 323 pruned.
[I 2021-05-10 18:21:41,745] Trial 324 pruned.
[I 2021-05-10 18:21:42,080] Trial 325 pruned.
[I 2021-05-10 18:21:42,658] Trial 326 pruned.
[I 2021-05-10 18:21:42,992] Trial 327 pruned.
[I 2021-05-10 18:21:43,548] Trial 328 pruned.
[I 2021-05-10 18:21:45,384] Trial 329 pruned.
[I 2021-05-10 18:21:45,944] Trial 330 pruned.
[I 2021-05-10 18:21:50,072] Trial 331 pruned.
[I 2021-05-10 18:21:51,336] Trial 332 pruned.
[I 2021-05-10 18:21:51,670] Trial 333 pruned.
[I 2021-05-10 18:21:54,894] Trial 334 pruned.
[I 2021-05-10 18:21:55,217] Trial 335 pruned.
[I 2021-05-10 18:21:55,784] Trial 336 pruned.
[I 2021-05-10 18:21:56,092] Trial 337 pruned.
[I 2021-05-10 18:21:56,180] Trial 338 pruned.
[I 2021-05-10 18:21:56,820] Trial 339 pruned.
[I 2021-05-10 18:21:58,728] Trial 340 pruned.
[I 2021-05-10 18:21:59,183] Trial 341 pruned.
[I 2021-05-10 18:21:59,505] Trial 342 pruned.
[I 2021-05-10 18:21:59,843] Trial 343 pruned.
[I 2021-05-10 18:22:00,396] Trial 344 pruned.
[I 2021-05-10 18:22:31,049] Trial 345 finished with value: 167.8688507080078 and parameters: {'lr': 0.0047613392707181006, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 792, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 133 with value: 159.60462951660156.
[I 2021-05-10 18:22:32,118] Trial 346 pruned.
[I 2021-05-10 18:22:32,427] Trial 347 pruned.
[I 2021-05-10 18:22:33,031] Trial 348 pruned.
[I 2021-05-10 18:22:33,307] Trial 349 pruned.
[I 2021-05-10 18:22:33,479] Trial 350 pruned.
[I 2021-05-10 18:22:33,945] Trial 351 pruned.
[I 2021-05-10 18:22:34,216] Trial 352 pruned.
[I 2021-05-10 18:22:34,317] Trial 353 pruned.
[I 2021-05-10 18:22:34,626] Trial 354 pruned.
[I 2021-05-10 18:22:35,153] Trial 355 pruned.
[I 2021-05-10 18:22:36,643] Trial 356 pruned.
[I 2021-05-10 18:22:37,187] Trial 357 pruned.
[I 2021-05-10 18:22:37,492] Trial 358 pruned.
[I 2021-05-10 18:22:38,080] Trial 359 pruned.
[I 2021-05-10 18:22:41,642] Trial 360 pruned.
[I 2021-05-10 18:22:42,871] Trial 361 pruned.
[I 2021-05-10 18:22:43,196] Trial 362 pruned.
[I 2021-05-10 18:22:43,759] Trial 363 pruned.
[I 2021-05-10 18:22:44,031] Trial 364 pruned.
[I 2021-05-10 18:22:44,648] Trial 365 pruned.
[I 2021-05-10 18:22:44,994] Trial 366 pruned.
[I 2021-05-10 18:22:45,092] Trial 367 pruned.
[I 2021-05-10 18:22:45,381] Trial 368 pruned.
[I 2021-05-10 18:22:46,002] Trial 369 pruned.
[I 2021-05-10 18:22:46,300] Trial 370 pruned.
[I 2021-05-10 18:22:46,919] Trial 371 pruned.
[I 2021-05-10 18:22:47,200] Trial 372 pruned.
[I 2021-05-10 18:22:47,833] Trial 373 pruned.
[I 2021-05-10 18:22:48,148] Trial 374 pruned.
[I 2021-05-10 18:22:51,901] Trial 375 pruned.
[I 2021-05-10 18:22:52,232] Trial 376 pruned.
[I 2021-05-10 18:22:52,867] Trial 377 pruned.
[I 2021-05-10 18:22:53,146] Trial 378 pruned.
[I 2021-05-10 18:22:53,467] Trial 379 pruned.
[I 2021-05-10 18:22:53,617] Trial 380 pruned.
[I 2021-05-10 18:22:54,000] Trial 381 pruned.
[I 2021-05-10 18:22:54,255] Trial 382 pruned.
[I 2021-05-10 18:22:54,363] Trial 383 pruned.
[I 2021-05-10 18:22:54,918] Trial 384 pruned.
[I 2021-05-10 18:22:55,242] Trial 385 pruned.
[I 2021-05-10 18:22:55,869] Trial 386 pruned.
[I 2021-05-10 18:22:56,169] Trial 387 pruned.
[I 2021-05-10 18:22:56,791] Trial 388 pruned.
[I 2021-05-10 18:22:57,078] Trial 389 pruned.
[I 2021-05-10 18:22:58,310] Trial 390 pruned.
[I 2021-05-10 18:22:58,595] Trial 391 pruned.
[I 2021-05-10 18:22:59,221] Trial 392 pruned.
[I 2021-05-10 18:22:59,550] Trial 393 pruned.
[I 2021-05-10 18:22:59,826] Trial 394 pruned.
[I 2021-05-10 18:23:00,468] Trial 395 pruned.
[I 2021-05-10 18:23:00,818] Trial 396 pruned.
[I 2021-05-10 18:23:01,389] Trial 397 pruned.
[I 2021-05-10 18:23:01,490] Trial 398 pruned.
[I 2021-05-10 18:23:01,780] Trial 399 pruned.
[I 2021-05-10 18:23:02,394] Trial 400 pruned.
[I 2021-05-10 18:23:02,680] Trial 401 pruned.
[I 2021-05-10 18:23:03,307] Trial 402 pruned.
[I 2021-05-10 18:23:03,645] Trial 403 pruned.
[I 2021-05-10 18:23:04,274] Trial 404 pruned.
[I 2021-05-10 18:23:04,549] Trial 405 pruned.
[I 2021-05-10 18:23:05,177] Trial 406 pruned.
[I 2021-05-10 18:23:05,456] Trial 407 pruned.
[I 2021-05-10 18:23:05,788] Trial 408 pruned.
[I 2021-05-10 18:23:50,480] Trial 409 finished with value: 164.8675537109375 and parameters: {'lr': 0.006620729944300896, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 456, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear'}. Best is trial 133 with value: 159.60462951660156.
[I 2021-05-10 18:23:50,961] Trial 410 pruned.
[I 2021-05-10 18:23:51,424] Trial 411 pruned.
[I 2021-05-10 18:23:51,821] Trial 412 pruned.
[I 2021-05-10 18:23:52,306] Trial 413 pruned.
[I 2021-05-10 18:23:52,772] Trial 414 pruned.
[I 2021-05-10 18:23:53,152] Trial 415 pruned.
[I 2021-05-10 18:23:53,715] Trial 416 pruned.
[I 2021-05-10 18:23:54,255] Trial 417 pruned.
[I 2021-05-10 18:23:54,407] Trial 418 pruned.
[I 2021-05-10 18:23:54,804] Trial 419 pruned.
[I 2021-05-10 18:23:54,894] Trial 420 pruned.
[I 2021-05-10 18:23:55,449] Trial 421 pruned.
[I 2021-05-10 18:23:56,002] Trial 422 pruned.
[I 2021-05-10 18:23:56,535] Trial 423 pruned.
[I 2021-05-10 18:23:57,089] Trial 424 pruned.
[I 2021-05-10 18:23:57,629] Trial 425 pruned.
[I 2021-05-10 18:23:58,182] Trial 426 pruned.
[I 2021-05-10 18:23:58,292] Trial 427 pruned.
[I 2021-05-10 18:23:58,883] Trial 428 pruned.
[I 2021-05-10 18:23:59,223] Trial 429 pruned.
[I 2021-05-10 18:24:00,313] Trial 430 pruned.
[I 2021-05-10 18:24:00,632] Trial 431 pruned.
[I 2021-05-10 18:24:01,174] Trial 432 pruned.
[I 2021-05-10 18:24:01,505] Trial 433 pruned.
[I 2021-05-10 18:24:02,126] Trial 434 pruned.
[I 2021-05-10 18:24:02,351] Trial 435 pruned.
[I 2021-05-10 18:24:02,906] Trial 436 pruned.
[I 2021-05-10 18:24:03,229] Trial 437 pruned.
[I 2021-05-10 18:24:03,850] Trial 438 pruned.
[I 2021-05-10 18:24:04,088] Trial 439 pruned.
[I 2021-05-10 18:24:04,243] Trial 440 pruned.
[I 2021-05-10 18:24:04,870] Trial 441 pruned.
[I 2021-05-10 18:24:05,191] Trial 442 pruned.
[I 2021-05-10 18:24:05,762] Trial 443 pruned.
[I 2021-05-10 18:24:06,058] Trial 444 pruned.
[I 2021-05-10 18:24:06,160] Trial 445 pruned.
[I 2021-05-10 18:24:06,488] Trial 446 pruned.
[I 2021-05-10 18:24:07,077] Trial 447 pruned.
[I 2021-05-10 18:24:07,397] Trial 448 pruned.
[I 2021-05-10 18:24:08,005] Trial 449 pruned.
[I 2021-05-10 18:24:08,288] Trial 450 pruned.
[I 2021-05-10 18:24:08,911] Trial 451 pruned.
[I 2021-05-10 18:24:09,197] Trial 452 pruned.
[I 2021-05-10 18:24:09,827] Trial 453 pruned.
[I 2021-05-10 18:24:10,154] Trial 454 pruned.
[I 2021-05-10 18:24:10,691] Trial 455 pruned.
[I 2021-05-10 18:24:10,789] Trial 456 pruned.
[I 2021-05-10 18:24:11,067] Trial 457 pruned.
[I 2021-05-10 18:24:11,597] Trial 458 pruned.
[I 2021-05-10 18:24:11,912] Trial 459 pruned.
[I 2021-05-10 18:24:12,522] Trial 460 pruned.
[I 2021-05-10 18:24:12,831] Trial 461 pruned.
[I 2021-05-10 18:24:13,370] Trial 462 pruned.
[I 2021-05-10 18:24:13,611] Trial 463 pruned.
[I 2021-05-10 18:24:16,719] Trial 464 pruned.
[I 2021-05-10 18:24:17,028] Trial 465 pruned.
[I 2021-05-10 18:24:17,633] Trial 466 pruned.
[I 2021-05-10 18:24:17,917] Trial 467 pruned.
[I 2021-05-10 18:24:18,514] Trial 468 pruned.
[I 2021-05-10 18:24:18,659] Trial 469 pruned.
[I 2021-05-10 18:24:18,982] Trial 470 pruned.
[I 2021-05-10 18:24:19,507] Trial 471 pruned.
[I 2021-05-10 18:24:19,815] Trial 472 pruned.
[I 2021-05-10 18:24:20,368] Trial 473 pruned.
[I 2021-05-10 18:24:20,468] Trial 474 pruned.
[I 2021-05-10 18:24:20,786] Trial 475 pruned.
[I 2021-05-10 18:24:21,297] Trial 476 pruned.
[I 2021-05-10 18:24:21,612] Trial 477 pruned.
[I 2021-05-10 18:24:21,981] Trial 478 pruned.
[I 2021-05-10 18:24:22,289] Trial 479 pruned.
[I 2021-05-10 18:24:22,755] Trial 480 pruned.
[I 2021-05-10 18:24:23,023] Trial 481 pruned.
[I 2021-05-10 18:24:30,051] Trial 482 pruned.
[I 2021-05-10 18:24:30,369] Trial 483 pruned.
[I 2021-05-10 18:24:30,967] Trial 484 pruned.
[I 2021-05-10 18:24:31,274] Trial 485 pruned.
[I 2021-05-10 18:24:31,364] Trial 486 pruned.
[I 2021-05-10 18:24:31,961] Trial 487 pruned.
[I 2021-05-10 18:24:32,235] Trial 488 pruned.
[I 2021-05-10 18:25:31,431] Trial 489 finished with value: 181.73397827148438 and parameters: {'lr': 0.0045031381860189, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 660, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 133 with value: 159.60462951660156.
[I 2021-05-10 18:25:31,702] Trial 490 pruned.
[I 2021-05-10 18:25:32,306] Trial 491 pruned.
[I 2021-05-10 18:25:32,596] Trial 492 pruned.
[I 2021-05-10 18:25:33,187] Trial 493 pruned.
[I 2021-05-10 18:25:33,500] Trial 494 pruned.
[I 2021-05-10 18:25:34,028] Trial 495 pruned.
[I 2021-05-10 18:25:34,336] Trial 496 pruned.
[I 2021-05-10 18:25:34,945] Trial 497 pruned.
[I 2021-05-10 18:25:35,219] Trial 498 pruned.
[I 2021-05-10 18:25:35,745] Trial 499 pruned.
[I 2021-05-10 18:25:35,926] Trial 500 pruned.
[I 2021-05-10 18:25:36,018] Trial 501 pruned.
[I 2021-05-10 18:25:36,334] Trial 502 pruned.
[I 2021-05-10 18:25:36,779] Trial 503 pruned.
[I 2021-05-10 18:25:37,088] Trial 504 pruned.
[I 2021-05-10 18:25:37,612] Trial 505 pruned.
[I 2021-05-10 18:25:37,924] Trial 506 pruned.
[I 2021-05-10 18:25:38,438] Trial 507 pruned.
[I 2021-05-10 18:25:38,755] Trial 508 pruned.
[I 2021-05-10 18:25:39,131] Trial 509 pruned.
[I 2021-05-10 18:25:39,437] Trial 510 pruned.
[I 2021-05-10 18:25:39,967] Trial 511 pruned.
[I 2021-05-10 18:26:06,435] Trial 512 finished with value: 165.63153076171875 and parameters: {'lr': 0.0022343157682239924, 'batch_size': 32, 'n_layers': 4, 'neurons_HL1': 724, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 133 with value: 159.60462951660156.
[I 2021-05-10 18:26:06,991] Trial 513 pruned.
[I 2021-05-10 18:26:07,281] Trial 514 pruned.
[I 2021-05-10 18:26:07,840] Trial 515 pruned.
[I 2021-05-10 18:26:08,136] Trial 516 pruned.
[I 2021-05-10 18:26:08,694] Trial 517 pruned.
[I 2021-05-10 18:26:08,979] Trial 518 pruned.
[I 2021-05-10 18:26:09,075] Trial 519 pruned.
[I 2021-05-10 18:26:09,634] Trial 520 pruned.
[I 2021-05-10 18:26:09,923] Trial 521 pruned.
[I 2021-05-10 18:27:04,180] Trial 522 finished with value: 162.51107788085938 and parameters: {'lr': 0.0029702688450788726, 'batch_size': 16, 'n_layers': 4, 'neurons_HL1': 668, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 133 with value: 159.60462951660156.
[I 2021-05-10 18:27:04,734] Trial 523 pruned.
[I 2021-05-10 18:27:05,286] Trial 524 pruned.
[I 2021-05-10 18:27:05,836] Trial 525 pruned.
[I 2021-05-10 18:27:14,396] Trial 526 pruned.
[I 2021-05-10 18:27:16,726] Trial 527 pruned.
[I 2021-05-10 18:27:17,300] Trial 528 pruned.
[I 2021-05-10 18:27:17,840] Trial 529 pruned.
[I 2021-05-10 18:27:18,393] Trial 530 pruned.
[I 2021-05-10 18:27:18,778] Trial 531 pruned.
[I 2021-05-10 18:27:19,339] Trial 532 pruned.
[I 2021-05-10 18:28:11,744] Trial 533 finished with value: 167.65638732910156 and parameters: {'lr': 0.001519731061412642, 'batch_size': 16, 'n_layers': 4, 'neurons_HL1': 766, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 133 with value: 159.60462951660156.
[I 2021-05-10 18:28:12,808] Trial 534 pruned.
[I 2021-05-10 18:28:13,414] Trial 535 pruned.
[I 2021-05-10 18:28:13,506] Trial 536 pruned.
[I 2021-05-10 18:28:13,683] Trial 537 pruned.
[I 2021-05-10 18:28:14,230] Trial 538 pruned.
[I 2021-05-10 18:28:14,836] Trial 539 pruned.
[I 2021-05-10 18:28:15,466] Trial 540 pruned.
[I 2021-05-10 18:28:16,010] Trial 541 pruned.
[I 2021-05-10 18:28:16,630] Trial 542 pruned.
[I 2021-05-10 18:28:17,182] Trial 543 pruned.
[I 2021-05-10 18:28:17,799] Trial 544 pruned.
[I 2021-05-10 18:28:18,049] Trial 545 pruned.
[I 2021-05-10 18:28:18,143] Trial 546 pruned.
[I 2021-05-10 18:28:18,769] Trial 547 pruned.
[I 2021-05-10 18:28:19,088] Trial 548 pruned.
[I 2021-05-10 18:29:12,840] Trial 549 finished with value: 159.330078125 and parameters: {'lr': 0.0027909436465498446, 'batch_size': 16, 'n_layers': 4, 'neurons_HL1': 676, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 18:29:15,990] Trial 550 pruned.
[I 2021-05-10 18:29:16,320] Trial 551 pruned.
[I 2021-05-10 18:29:16,873] Trial 552 pruned.
[I 2021-05-10 18:29:17,199] Trial 553 pruned.
[I 2021-05-10 18:29:17,751] Trial 554 pruned.
[I 2021-05-10 18:29:18,088] Trial 555 pruned.
[I 2021-05-10 18:29:18,713] Trial 556 pruned.
[I 2021-05-10 18:29:19,005] Trial 557 pruned.
[I 2021-05-10 18:29:19,542] Trial 558 pruned.
[I 2021-05-10 18:29:19,868] Trial 559 pruned.
[I 2021-05-10 18:30:20,218] Trial 560 finished with value: 172.98199462890625 and parameters: {'lr': 0.003168035668501484, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 734, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'relu'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 18:30:20,318] Trial 561 pruned.
[I 2021-05-10 18:30:22,111] Trial 562 pruned.
[I 2021-05-10 18:30:22,274] Trial 563 pruned.
[I 2021-05-10 18:30:22,565] Trial 564 pruned.
[I 2021-05-10 18:30:22,950] Trial 565 pruned.
[I 2021-05-10 18:30:23,281] Trial 566 pruned.
[I 2021-05-10 18:30:23,831] Trial 567 pruned.
[I 2021-05-10 18:30:24,152] Trial 568 pruned.
[I 2021-05-10 18:30:24,681] Trial 569 pruned.
[I 2021-05-10 18:30:25,299] Trial 570 pruned.
[I 2021-05-10 18:30:25,830] Trial 571 pruned.
[I 2021-05-10 18:30:26,161] Trial 572 pruned.
[I 2021-05-10 18:30:26,738] Trial 573 pruned.
[I 2021-05-10 18:30:27,076] Trial 574 pruned.
[I 2021-05-10 18:30:27,642] Trial 575 pruned.
[I 2021-05-10 18:30:27,747] Trial 576 pruned.
[I 2021-05-10 18:30:28,387] Trial 577 pruned.
[I 2021-05-10 18:30:28,673] Trial 578 pruned.
[I 2021-05-10 18:30:29,312] Trial 579 pruned.
[I 2021-05-10 18:30:29,603] Trial 580 pruned.
[I 2021-05-10 18:30:30,140] Trial 581 pruned.
[I 2021-05-10 18:30:30,464] Trial 582 pruned.
[I 2021-05-10 18:30:31,071] Trial 583 pruned.
[I 2021-05-10 18:30:31,360] Trial 584 pruned.
[I 2021-05-10 18:30:31,878] Trial 585 pruned.
[I 2021-05-10 18:30:32,125] Trial 586 pruned.
[I 2021-05-10 18:30:32,754] Trial 587 pruned.
[I 2021-05-10 18:30:33,038] Trial 588 pruned.
[I 2021-05-10 18:30:33,658] Trial 589 pruned.
[I 2021-05-10 18:30:33,762] Trial 590 pruned.
[I 2021-05-10 18:30:34,315] Trial 591 pruned.
[I 2021-05-10 18:30:34,497] Trial 592 pruned.
[I 2021-05-10 18:30:34,797] Trial 593 pruned.
[I 2021-05-10 18:30:35,404] Trial 594 pruned.
[I 2021-05-10 18:30:35,701] Trial 595 pruned.
[I 2021-05-10 18:30:36,347] Trial 596 pruned.
[I 2021-05-10 18:30:36,640] Trial 597 pruned.
[I 2021-05-10 18:30:37,265] Trial 598 pruned.
[I 2021-05-10 18:30:37,594] Trial 599 pruned.
[I 2021-05-10 18:30:37,975] Trial 600 pruned.
[I 2021-05-10 18:30:38,259] Trial 601 pruned.
[I 2021-05-10 18:30:38,856] Trial 602 pruned.
[I 2021-05-10 18:30:39,152] Trial 603 pruned.
[I 2021-05-10 18:30:39,757] Trial 604 pruned.
[I 2021-05-10 18:30:39,861] Trial 605 pruned.
[I 2021-05-10 18:30:40,346] Trial 606 pruned.
[I 2021-05-10 18:30:40,631] Trial 607 pruned.
[I 2021-05-10 18:30:41,212] Trial 608 pruned.
[I 2021-05-10 18:30:41,525] Trial 609 pruned.
[I 2021-05-10 18:30:42,053] Trial 610 pruned.
[I 2021-05-10 18:30:42,380] Trial 611 pruned.
[I 2021-05-10 18:30:42,908] Trial 612 pruned.
[I 2021-05-10 18:30:43,219] Trial 613 pruned.
[I 2021-05-10 18:30:43,786] Trial 614 pruned.
[I 2021-05-10 18:30:44,096] Trial 615 pruned.
[I 2021-05-10 18:30:44,632] Trial 616 pruned.
[I 2021-05-10 18:30:44,950] Trial 617 pruned.
[I 2021-05-10 18:30:45,334] Trial 618 pruned.
[I 2021-05-10 18:30:45,428] Trial 619 pruned.
[I 2021-05-10 18:30:46,023] Trial 620 pruned.
[I 2021-05-10 18:30:46,338] Trial 621 pruned.
[I 2021-05-10 18:30:46,488] Trial 622 pruned.
[I 2021-05-10 18:30:47,094] Trial 623 pruned.
[I 2021-05-10 18:30:47,370] Trial 624 pruned.
[I 2021-05-10 18:30:47,970] Trial 625 pruned.
[I 2021-05-10 18:30:48,249] Trial 626 pruned.
[I 2021-05-10 18:30:48,697] Trial 627 pruned.
[I 2021-05-10 18:30:48,982] Trial 628 pruned.
[I 2021-05-10 18:30:49,588] Trial 629 pruned.
[I 2021-05-10 18:30:49,913] Trial 630 pruned.
[I 2021-05-10 18:30:50,435] Trial 631 pruned.
[I 2021-05-10 18:30:50,751] Trial 632 pruned.
[I 2021-05-10 18:30:51,278] Trial 633 pruned.
[I 2021-05-10 18:30:51,881] Trial 634 pruned.
[I 2021-05-10 18:30:51,978] Trial 635 pruned.
[I 2021-05-10 18:30:52,311] Trial 636 pruned.
[I 2021-05-10 18:30:52,853] Trial 637 pruned.
[I 2021-05-10 18:30:53,174] Trial 638 pruned.
[I 2021-05-10 18:30:53,788] Trial 639 pruned.
[I 2021-05-10 18:30:54,076] Trial 640 pruned.
[I 2021-05-10 18:30:54,599] Trial 641 pruned.
[I 2021-05-10 18:30:54,922] Trial 642 pruned.
[I 2021-05-10 18:30:55,471] Trial 643 pruned.
[I 2021-05-10 18:30:55,786] Trial 644 pruned.
[I 2021-05-10 18:30:56,345] Trial 645 pruned.
[I 2021-05-10 18:30:56,587] Trial 646 pruned.
[I 2021-05-10 18:30:57,190] Trial 647 pruned.
[I 2021-05-10 18:30:57,296] Trial 648 pruned.
[I 2021-05-10 18:30:57,840] Trial 649 pruned.
[I 2021-05-10 18:30:58,159] Trial 650 pruned.
[I 2021-05-10 18:30:58,714] Trial 651 pruned.
[I 2021-05-10 18:30:58,890] Trial 652 pruned.
[I 2021-05-10 18:30:59,202] Trial 653 pruned.
[I 2021-05-10 18:30:59,842] Trial 654 pruned.
[I 2021-05-10 18:31:00,141] Trial 655 pruned.
[I 2021-05-10 18:31:00,577] Trial 656 pruned.
[I 2021-05-10 18:31:00,931] Trial 657 pruned.
[I 2021-05-10 18:31:01,504] Trial 658 pruned.
[I 2021-05-10 18:31:01,832] Trial 659 pruned.
[I 2021-05-10 18:31:02,393] Trial 660 pruned.
[I 2021-05-10 18:31:02,745] Trial 661 pruned.
[I 2021-05-10 18:31:05,125] Trial 662 pruned.
[I 2021-05-10 18:31:05,222] Trial 663 pruned.
[I 2021-05-10 18:31:06,300] Trial 664 pruned.
[I 2021-05-10 18:31:06,637] Trial 665 pruned.
[I 2021-05-10 18:31:07,105] Trial 666 pruned.
[I 2021-05-10 18:31:07,432] Trial 667 pruned.
[I 2021-05-10 18:31:07,990] Trial 668 pruned.
[I 2021-05-10 18:31:08,327] Trial 669 pruned.
[I 2021-05-10 18:31:08,880] Trial 670 pruned.
[I 2021-05-10 18:31:09,222] Trial 671 pruned.
[I 2021-05-10 18:31:09,776] Trial 672 pruned.
[I 2021-05-10 18:31:10,097] Trial 673 pruned.
[I 2021-05-10 18:31:10,679] Trial 674 pruned.
[I 2021-05-10 18:31:11,009] Trial 675 pruned.
[I 2021-05-10 18:31:11,578] Trial 676 pruned.
[I 2021-05-10 18:31:12,200] Trial 677 pruned.
[I 2021-05-10 18:31:12,299] Trial 678 pruned.
[I 2021-05-10 18:31:12,629] Trial 679 pruned.
[I 2021-05-10 18:31:13,024] Trial 680 pruned.
[I 2021-05-10 18:31:13,310] Trial 681 pruned.
[I 2021-05-10 18:31:13,919] Trial 682 pruned.
[I 2021-05-10 18:31:14,105] Trial 683 pruned.
[I 2021-05-10 18:31:14,397] Trial 684 pruned.
[I 2021-05-10 18:31:16,819] Trial 685 pruned.
[I 2021-05-10 18:31:17,123] Trial 686 pruned.
[I 2021-05-10 18:31:18,908] Trial 687 pruned.
[I 2021-05-10 18:31:45,289] Trial 688 finished with value: 162.17282104492188 and parameters: {'lr': 0.0028284635019695165, 'batch_size': 32, 'n_layers': 4, 'neurons_HL1': 790, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 18:31:45,579] Trial 689 pruned.
[I 2021-05-10 18:31:45,859] Trial 690 pruned.
[I 2021-05-10 18:31:46,148] Trial 691 pruned.
[I 2021-05-10 18:31:46,434] Trial 692 pruned.
[I 2021-05-10 18:31:46,719] Trial 693 pruned.
[I 2021-05-10 18:31:46,999] Trial 694 pruned.
[I 2021-05-10 18:31:47,281] Trial 695 pruned.
[I 2021-05-10 18:31:47,571] Trial 696 pruned.
[I 2021-05-10 18:31:47,855] Trial 697 pruned.
[I 2021-05-10 18:31:48,140] Trial 698 pruned.
[I 2021-05-10 18:31:48,430] Trial 699 pruned.
[I 2021-05-10 18:31:48,718] Trial 700 pruned.
[I 2021-05-10 18:31:48,998] Trial 701 pruned.
[I 2021-05-10 18:31:49,284] Trial 702 pruned.
[I 2021-05-10 18:31:49,368] Trial 703 pruned.
[I 2021-05-10 18:31:49,658] Trial 704 pruned.
[I 2021-05-10 18:31:49,931] Trial 705 pruned.
[I 2021-05-10 18:31:50,213] Trial 706 pruned.
[I 2021-05-10 18:31:50,498] Trial 707 pruned.
[I 2021-05-10 18:31:50,782] Trial 708 pruned.
[I 2021-05-10 18:31:51,066] Trial 709 pruned.
[I 2021-05-10 18:31:51,343] Trial 710 pruned.
[I 2021-05-10 18:31:51,438] Trial 711 pruned.
[I 2021-05-10 18:31:51,720] Trial 712 pruned.
[I 2021-05-10 18:31:51,874] Trial 713 pruned.
[I 2021-05-10 18:31:52,118] Trial 714 pruned.
[I 2021-05-10 18:31:52,324] Trial 715 pruned.
[I 2021-05-10 18:31:52,870] Trial 716 pruned.
[I 2021-05-10 18:31:53,142] Trial 717 pruned.
[I 2021-05-10 18:31:53,681] Trial 718 pruned.
[I 2021-05-10 18:31:53,958] Trial 719 pruned.
[I 2021-05-10 18:31:54,493] Trial 720 pruned.
[I 2021-05-10 18:31:54,812] Trial 721 pruned.
[I 2021-05-10 18:31:55,348] Trial 722 pruned.
[I 2021-05-10 18:31:55,674] Trial 723 pruned.
[I 2021-05-10 18:31:55,771] Trial 724 pruned.
[I 2021-05-10 18:31:56,380] Trial 725 pruned.
[I 2021-05-10 18:31:56,656] Trial 726 pruned.
[I 2021-05-10 18:31:57,264] Trial 727 pruned.
[I 2021-05-10 18:31:57,538] Trial 728 pruned.
[I 2021-05-10 18:31:58,013] Trial 729 pruned.
[I 2021-05-10 18:31:58,327] Trial 730 pruned.
[I 2021-05-10 18:31:58,864] Trial 731 pruned.
[I 2021-05-10 18:31:59,175] Trial 732 pruned.
[I 2021-05-10 18:31:59,719] Trial 733 pruned.
[I 2021-05-10 18:32:00,255] Trial 734 pruned.
[I 2021-05-10 18:32:00,565] Trial 735 pruned.
[I 2021-05-10 18:32:01,102] Trial 736 pruned.
[I 2021-05-10 18:32:01,419] Trial 737 pruned.
[I 2021-05-10 18:32:01,525] Trial 738 pruned.
[I 2021-05-10 18:32:02,054] Trial 739 pruned.
[I 2021-05-10 18:32:02,340] Trial 740 pruned.
[I 2021-05-10 18:32:02,936] Trial 741 pruned.
[I 2021-05-10 18:32:03,115] Trial 742 pruned.
[I 2021-05-10 18:32:03,395] Trial 743 pruned.
[I 2021-05-10 18:32:03,994] Trial 744 pruned.
[I 2021-05-10 18:32:04,275] Trial 745 pruned.
[I 2021-05-10 18:32:04,814] Trial 746 pruned.
[I 2021-05-10 18:32:05,138] Trial 747 pruned.
[I 2021-05-10 18:32:05,666] Trial 748 pruned.
[I 2021-05-10 18:32:05,987] Trial 749 pruned.
[I 2021-05-10 18:32:06,372] Trial 750 pruned.
[I 2021-05-10 18:32:06,657] Trial 751 pruned.
[I 2021-05-10 18:32:07,262] Trial 752 pruned.
[I 2021-05-10 18:32:07,354] Trial 753 pruned.
[I 2021-05-10 18:32:07,677] Trial 754 pruned.
[I 2021-05-10 18:32:08,230] Trial 755 pruned.
[I 2021-05-10 18:32:08,482] Trial 756 pruned.
[I 2021-05-10 18:32:09,077] Trial 757 pruned.
[I 2021-05-10 18:32:09,373] Trial 758 pruned.
[I 2021-05-10 18:32:09,963] Trial 759 pruned.
[I 2021-05-10 18:32:10,256] Trial 760 pruned.
[I 2021-05-10 18:32:10,851] Trial 761 pruned.
[I 2021-05-10 18:32:11,168] Trial 762 pruned.
[I 2021-05-10 18:32:11,702] Trial 763 pruned.
[I 2021-05-10 18:32:11,980] Trial 764 pruned.
[I 2021-05-10 18:32:12,588] Trial 765 pruned.
[I 2021-05-10 18:32:12,872] Trial 766 pruned.
[I 2021-05-10 18:32:13,499] Trial 767 pruned.
[I 2021-05-10 18:32:13,786] Trial 768 pruned.
[I 2021-05-10 18:32:13,874] Trial 769 pruned.
[I 2021-05-10 18:32:14,429] Trial 770 pruned.
[I 2021-05-10 18:32:14,771] Trial 771 pruned.
[I 2021-05-10 18:32:14,950] Trial 772 pruned.
[I 2021-05-10 18:32:15,593] Trial 773 pruned.
[I 2021-05-10 18:32:15,883] Trial 774 pruned.
[I 2021-05-10 18:32:16,411] Trial 775 pruned.
[I 2021-05-10 18:32:18,002] Trial 776 pruned.
[I 2021-05-10 18:33:18,161] Trial 777 finished with value: 164.50341796875 and parameters: {'lr': 0.0022864440256476558, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 728, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 18:33:18,804] Trial 778 pruned.
[I 2021-05-10 18:33:19,210] Trial 779 pruned.
[I 2021-05-10 18:33:19,775] Trial 780 pruned.
[I 2021-05-10 18:33:20,413] Trial 781 pruned.
[I 2021-05-10 18:33:20,979] Trial 782 pruned.
[I 2021-05-10 18:33:21,620] Trial 783 pruned.
[I 2021-05-10 18:33:22,185] Trial 784 pruned.
[I 2021-05-10 18:33:22,825] Trial 785 pruned.
[I 2021-05-10 18:33:23,378] Trial 786 pruned.
[I 2021-05-10 18:33:24,016] Trial 787 pruned.
[I 2021-05-10 18:33:24,578] Trial 788 pruned.
[I 2021-05-10 18:33:28,303] Trial 789 pruned.
[I 2021-05-10 18:33:28,872] Trial 790 pruned.
[I 2021-05-10 18:33:29,490] Trial 791 pruned.
[I 2021-05-10 18:33:30,056] Trial 792 pruned.
[I 2021-05-10 18:33:30,699] Trial 793 pruned.
[I 2021-05-10 18:33:30,801] Trial 794 pruned.
[I 2021-05-10 18:33:31,288] Trial 795 pruned.
[I 2021-05-10 18:33:31,924] Trial 796 pruned.
[I 2021-05-10 18:33:32,483] Trial 797 pruned.
[I 2021-05-10 18:33:33,131] Trial 798 pruned.
[I 2021-05-10 18:33:33,693] Trial 799 pruned.
[I 2021-05-10 18:33:35,833] Trial 800 pruned.
[I 2021-05-10 18:33:36,474] Trial 801 pruned.
[I 2021-05-10 18:33:36,582] Trial 802 pruned.
[I 2021-05-10 18:33:37,220] Trial 803 pruned.
[I 2021-05-10 18:33:37,383] Trial 804 pruned.
[I 2021-05-10 18:33:38,018] Trial 805 pruned.
[I 2021-05-10 18:33:38,658] Trial 806 pruned.
[I 2021-05-10 18:33:39,064] Trial 807 pruned.
[I 2021-05-10 18:33:40,169] Trial 808 pruned.
[I 2021-05-10 18:33:40,731] Trial 809 pruned.
[I 2021-05-10 18:33:41,069] Trial 810 pruned.
[I 2021-05-10 18:33:41,704] Trial 811 pruned.
[I 2021-05-10 18:33:41,808] Trial 812 pruned.
[I 2021-05-10 18:33:42,140] Trial 813 pruned.
[I 2021-05-10 18:33:42,717] Trial 814 pruned.
[I 2021-05-10 18:33:43,014] Trial 815 pruned.
[I 2021-05-10 18:33:43,518] Trial 816 pruned.
[I 2021-05-10 18:33:43,852] Trial 817 pruned.
[I 2021-05-10 18:33:44,423] Trial 818 pruned.
[I 2021-05-10 18:33:44,758] Trial 819 pruned.
[I 2021-05-10 18:33:45,305] Trial 820 pruned.
[I 2021-05-10 18:33:45,649] Trial 821 pruned.
[I 2021-05-10 18:33:46,211] Trial 822 pruned.
[I 2021-05-10 18:33:46,858] Trial 823 pruned.
[I 2021-05-10 18:33:47,159] Trial 824 pruned.
[I 2021-05-10 18:33:47,816] Trial 825 pruned.
[I 2021-05-10 18:33:48,116] Trial 826 pruned.
[I 2021-05-10 18:33:48,218] Trial 827 pruned.
[I 2021-05-10 18:33:48,851] Trial 828 pruned.
[I 2021-05-10 18:33:49,150] Trial 829 pruned.
[I 2021-05-10 18:33:49,342] Trial 830 pruned.
[I 2021-05-10 18:33:49,910] Trial 831 pruned.
[I 2021-05-10 18:33:50,253] Trial 832 pruned.
[I 2021-05-10 18:33:50,890] Trial 833 pruned.
[I 2021-05-10 18:33:51,107] Trial 834 pruned.
[I 2021-05-10 18:33:51,659] Trial 835 pruned.
[I 2021-05-10 18:33:51,925] Trial 836 pruned.
[I 2021-05-10 18:33:52,498] Trial 837 pruned.
[I 2021-05-10 18:33:53,144] Trial 838 pruned.
[I 2021-05-10 18:33:53,452] Trial 839 pruned.
[I 2021-05-10 18:33:54,076] Trial 840 pruned.
[I 2021-05-10 18:33:54,415] Trial 841 pruned.
[I 2021-05-10 18:33:54,527] Trial 842 pruned.
[I 2021-05-10 18:33:55,085] Trial 843 pruned.
[I 2021-05-10 18:33:55,420] Trial 844 pruned.
[I 2021-05-10 18:33:55,985] Trial 845 pruned.
[I 2021-05-10 18:33:56,319] Trial 846 pruned.
[I 2021-05-10 18:33:56,901] Trial 847 pruned.
[I 2021-05-10 18:33:57,235] Trial 848 pruned.
[I 2021-05-10 18:33:57,862] Trial 849 pruned.
[I 2021-05-10 18:33:58,155] Trial 850 pruned.
[I 2021-05-10 18:33:58,718] Trial 851 pruned.
[I 2021-05-10 18:33:59,366] Trial 852 pruned.
[I 2021-05-10 18:33:59,628] Trial 853 pruned.
[I 2021-05-10 18:34:00,191] Trial 854 pruned.
[I 2021-05-10 18:34:00,529] Trial 855 pruned.
[I 2021-05-10 18:34:00,631] Trial 856 pruned.
[I 2021-05-10 18:34:01,274] Trial 857 pruned.
[I 2021-05-10 18:34:01,574] Trial 858 pruned.
[I 2021-05-10 18:34:02,198] Trial 859 pruned.
[I 2021-05-10 18:34:02,369] Trial 860 pruned.
[I 2021-05-10 18:34:02,665] Trial 861 pruned.
[I 2021-05-10 18:34:03,310] Trial 862 pruned.
[I 2021-05-10 18:34:03,653] Trial 863 pruned.
[I 2021-05-10 18:34:04,217] Trial 864 pruned.
[I 2021-05-10 18:34:04,545] Trial 865 pruned.
[I 2021-05-10 18:34:06,870] Trial 866 pruned.
[I 2021-05-10 18:34:09,628] Trial 867 pruned.
[I 2021-05-10 18:34:09,926] Trial 868 pruned.
[I 2021-05-10 18:34:10,559] Trial 869 pruned.
[I 2021-05-10 18:34:11,205] Trial 870 pruned.
[I 2021-05-10 18:34:11,305] Trial 871 pruned.
[I 2021-05-10 18:34:11,879] Trial 872 pruned.
[I 2021-05-10 18:34:12,218] Trial 873 pruned.
[I 2021-05-10 18:34:12,804] Trial 874 pruned.
[I 2021-05-10 18:34:13,140] Trial 875 pruned.
[I 2021-05-10 18:34:13,610] Trial 876 pruned.
[I 2021-05-10 18:34:13,902] Trial 877 pruned.
[I 2021-05-10 18:34:15,166] Trial 878 pruned.
[I 2021-05-10 18:34:15,510] Trial 879 pruned.
[I 2021-05-10 18:34:16,634] Trial 880 pruned.
[I 2021-05-10 18:34:17,181] Trial 881 pruned.
[I 2021-05-10 18:34:47,277] Trial 882 finished with value: 160.0874481201172 and parameters: {'lr': 0.004398533398322774, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 454, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 18:34:47,610] Trial 883 pruned.
[I 2021-05-10 18:34:47,945] Trial 884 pruned.
[I 2021-05-10 18:34:48,282] Trial 885 pruned.
[I 2021-05-10 18:34:48,620] Trial 886 pruned.
[I 2021-05-10 18:34:48,961] Trial 887 pruned.
[I 2021-05-10 18:34:49,301] Trial 888 pruned.
[I 2021-05-10 18:34:49,639] Trial 889 pruned.
[I 2021-05-10 18:34:49,979] Trial 890 pruned.
[I 2021-05-10 18:34:50,317] Trial 891 pruned.
[I 2021-05-10 18:34:50,654] Trial 892 pruned.
[I 2021-05-10 18:34:50,993] Trial 893 pruned.
[I 2021-05-10 18:34:51,330] Trial 894 pruned.
[I 2021-05-10 18:34:51,667] Trial 895 pruned.
[I 2021-05-10 18:34:51,933] Trial 896 pruned.
[I 2021-05-10 18:34:52,271] Trial 897 pruned.
[I 2021-05-10 18:34:52,629] Trial 898 pruned.
[I 2021-05-10 18:34:52,971] Trial 899 pruned.
[I 2021-05-10 18:34:53,087] Trial 900 pruned.
[I 2021-05-10 18:34:53,432] Trial 901 pruned.
[I 2021-05-10 18:34:53,778] Trial 902 pruned.
[I 2021-05-10 18:34:53,971] Trial 903 pruned.
[I 2021-05-10 18:34:54,288] Trial 904 pruned.
[I 2021-05-10 18:34:54,633] Trial 905 pruned.
[I 2021-05-10 18:34:54,932] Trial 906 pruned.
[I 2021-05-10 18:34:55,264] Trial 907 pruned.
[I 2021-05-10 18:34:55,364] Trial 908 pruned.
[I 2021-05-10 18:34:55,689] Trial 909 pruned.
[I 2021-05-10 18:34:55,985] Trial 910 pruned.
[I 2021-05-10 18:34:56,332] Trial 911 pruned.
[I 2021-05-10 18:34:56,636] Trial 912 pruned.
[I 2021-05-10 18:34:56,893] Trial 913 pruned.
[I 2021-05-10 18:34:57,234] Trial 914 pruned.
[I 2021-05-10 18:34:57,535] Trial 915 pruned.
[I 2021-05-10 18:34:57,648] Trial 916 pruned.
[I 2021-05-10 18:34:57,950] Trial 917 pruned.
[I 2021-05-10 18:34:59,164] Trial 918 pruned.
[I 2021-05-10 18:34:59,475] Trial 919 pruned.
[I 2021-05-10 18:35:00,124] Trial 920 pruned.
[I 2021-05-10 18:35:00,421] Trial 921 pruned.
[I 2021-05-10 18:35:01,073] Trial 922 pruned.
[I 2021-05-10 18:35:01,269] Trial 923 pruned.
[I 2021-05-10 18:35:01,535] Trial 924 pruned.
[I 2021-05-10 18:35:02,176] Trial 925 pruned.
[I 2021-05-10 18:35:02,491] Trial 926 pruned.
[I 2021-05-10 18:35:03,134] Trial 927 pruned.
[I 2021-05-10 18:35:03,440] Trial 928 pruned.
[I 2021-05-10 18:35:04,089] Trial 929 pruned.
[I 2021-05-10 18:35:04,197] Trial 930 pruned.
[I 2021-05-10 18:35:04,544] Trial 931 pruned.
[I 2021-05-10 18:35:05,123] Trial 932 pruned.
[I 2021-05-10 18:35:05,474] Trial 933 pruned.
[I 2021-05-10 18:35:06,027] Trial 934 pruned.
[I 2021-05-10 18:35:06,365] Trial 935 pruned.
[I 2021-05-10 18:35:06,932] Trial 936 pruned.
[I 2021-05-10 18:35:07,271] Trial 937 pruned.
[I 2021-05-10 18:35:07,906] Trial 938 pruned.
[I 2021-05-10 18:35:08,215] Trial 939 pruned.
[I 2021-05-10 18:35:08,763] Trial 940 pruned.
[I 2021-05-10 18:35:09,110] Trial 941 pruned.
[I 2021-05-10 18:35:09,739] Trial 942 pruned.
[I 2021-05-10 18:35:10,042] Trial 943 pruned.
[I 2021-05-10 18:35:10,545] Trial 944 pruned.
[I 2021-05-10 18:35:10,663] Trial 945 pruned.
[I 2021-05-10 18:35:10,966] Trial 946 pruned.
[I 2021-05-10 18:35:14,140] Trial 947 pruned.
[I 2021-05-10 18:35:14,480] Trial 948 pruned.
[I 2021-05-10 18:35:15,119] Trial 949 pruned.
[I 2021-05-10 18:35:15,420] Trial 950 pruned.
[I 2021-05-10 18:35:15,990] Trial 951 pruned.
[I 2021-05-10 18:35:16,335] Trial 952 pruned.
[I 2021-05-10 18:35:16,908] Trial 953 pruned.
[I 2021-05-10 18:35:17,252] Trial 954 pruned.
[I 2021-05-10 18:35:17,421] Trial 955 pruned.
[I 2021-05-10 18:35:18,063] Trial 956 pruned.
[I 2021-05-10 18:35:18,376] Trial 957 pruned.
[I 2021-05-10 18:35:19,007] Trial 958 pruned.
[I 2021-05-10 18:35:19,128] Trial 959 pruned.
[I 2021-05-10 18:35:19,420] Trial 960 pruned.
[I 2021-05-10 18:35:22,079] Trial 961 pruned.
[I 2021-05-10 18:35:22,427] Trial 962 pruned.
[I 2021-05-10 18:35:22,997] Trial 963 pruned.
[I 2021-05-10 18:35:23,341] Trial 964 pruned.
[I 2021-05-10 18:35:23,813] Trial 965 pruned.
[I 2021-05-10 18:35:24,160] Trial 966 pruned.
[I 2021-05-10 18:35:24,717] Trial 967 pruned.
[I 2021-05-10 18:35:25,012] Trial 968 pruned.
[I 2021-05-10 18:36:27,336] Trial 969 finished with value: 163.35867309570312 and parameters: {'lr': 0.0017320202968756815, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 964, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 18:36:27,992] Trial 970 pruned.
[I 2021-05-10 18:36:28,581] Trial 971 pruned.
[I 2021-05-10 18:36:29,247] Trial 972 pruned.
[I 2021-05-10 18:36:32,393] Trial 973 pruned.
[I 2021-05-10 18:36:32,996] Trial 974 pruned.
[I 2021-05-10 18:36:35,726] Trial 975 pruned.
[I 2021-05-10 18:36:37,001] Trial 976 pruned.
[I 2021-05-10 18:36:37,576] Trial 977 pruned.
[I 2021-05-10 18:36:38,243] Trial 978 pruned.
[I 2021-05-10 18:36:38,905] Trial 979 pruned.
[I 2021-05-10 18:36:39,505] Trial 980 pruned.
[I 2021-05-10 18:36:40,178] Trial 981 pruned.
[I 2021-05-10 18:36:40,779] Trial 982 pruned.
[I 2021-05-10 18:36:41,446] Trial 983 pruned.
[I 2021-05-10 18:36:42,028] Trial 984 pruned.
[I 2021-05-10 18:36:42,687] Trial 985 pruned.
[I 2021-05-10 18:36:43,288] Trial 986 pruned.
[I 2021-05-10 18:36:43,838] Trial 987 pruned.
[I 2021-05-10 18:36:44,484] Trial 988 pruned.
[I 2021-05-10 18:37:41,143] Trial 989 finished with value: 167.4645538330078 and parameters: {'lr': 0.0017847171099451447, 'batch_size': 16, 'n_layers': 4, 'neurons_HL1': 972, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 18:37:41,262] Trial 990 pruned.
[I 2021-05-10 18:38:43,810] Trial 991 finished with value: 176.75094604492188 and parameters: {'lr': 0.0022970549960030116, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 850, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'relu'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 18:38:44,954] Trial 992 pruned.
[I 2021-05-10 18:38:45,144] Trial 993 pruned.
[I 2021-05-10 18:38:45,705] Trial 994 pruned.
[I 2021-05-10 18:38:46,347] Trial 995 pruned.
[I 2021-05-10 18:38:46,921] Trial 996 pruned.
[I 2021-05-10 18:38:47,559] Trial 997 pruned.
[I 2021-05-10 18:38:47,677] Trial 998 pruned.
[I 2021-05-10 18:38:48,230] Trial 999 pruned.
[I 2021-05-10 18:38:48,856] Trial 1000 pruned.
[I 2021-05-10 18:38:49,448] Trial 1001 pruned.
[I 2021-05-10 18:38:50,099] Trial 1002 pruned.
[I 2021-05-10 18:38:50,695] Trial 1003 pruned.
[I 2021-05-10 18:38:51,356] Trial 1004 pruned.
[I 2021-05-10 18:38:52,460] Trial 1005 pruned.
[I 2021-05-10 18:38:52,584] Trial 1006 pruned.
[I 2021-05-10 18:38:53,163] Trial 1007 pruned.
[I 2021-05-10 18:38:53,682] Trial 1008 pruned.
[I 2021-05-10 18:38:54,311] Trial 1009 pruned.
[I 2021-05-10 18:38:54,477] Trial 1010 pruned.
[I 2021-05-10 18:38:55,093] Trial 1011 pruned.
[I 2021-05-10 18:38:55,387] Trial 1012 pruned.
[I 2021-05-10 18:38:56,004] Trial 1013 pruned.
[I 2021-05-10 18:38:56,293] Trial 1014 pruned.
[I 2021-05-10 18:38:56,905] Trial 1015 pruned.
[I 2021-05-10 18:38:57,201] Trial 1016 pruned.
[I 2021-05-10 18:39:57,805] Trial 1017 finished with value: 162.83807373046875 and parameters: {'lr': 0.0016875001548090666, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 682, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 18:39:58,452] Trial 1018 pruned.
[I 2021-05-10 18:39:59,094] Trial 1019 pruned.
[I 2021-05-10 18:39:59,725] Trial 1020 pruned.
[I 2021-05-10 18:40:00,375] Trial 1021 pruned.
[I 2021-05-10 18:40:01,015] Trial 1022 pruned.
[I 2021-05-10 18:40:01,655] Trial 1023 pruned.
[I 2021-05-10 18:40:02,284] Trial 1024 pruned.
[I 2021-05-10 18:40:02,921] Trial 1025 pruned.
[I 2021-05-10 18:40:03,578] Trial 1026 pruned.
[I 2021-05-10 18:40:04,212] Trial 1027 pruned.
[I 2021-05-10 18:40:04,861] Trial 1028 pruned.
[I 2021-05-10 18:40:08,492] Trial 1029 pruned.
[I 2021-05-10 18:40:10,950] Trial 1030 pruned.
[I 2021-05-10 18:40:11,596] Trial 1031 pruned.
[I 2021-05-10 18:40:12,244] Trial 1032 pruned.
[I 2021-05-10 18:40:12,889] Trial 1033 pruned.
[I 2021-05-10 18:40:13,509] Trial 1034 pruned.
[I 2021-05-10 18:40:14,133] Trial 1035 pruned.
[I 2021-05-10 18:40:14,764] Trial 1036 pruned.
[I 2021-05-10 18:40:14,879] Trial 1037 pruned.
[I 2021-05-10 18:40:15,514] Trial 1038 pruned.
[I 2021-05-10 18:40:16,137] Trial 1039 pruned.
[I 2021-05-10 18:40:17,379] Trial 1040 pruned.
[I 2021-05-10 18:40:18,623] Trial 1041 pruned.
[I 2021-05-10 18:40:19,279] Trial 1042 pruned.
[I 2021-05-10 18:40:21,663] Trial 1043 pruned.
[I 2021-05-10 18:40:22,275] Trial 1044 pruned.
[I 2021-05-10 18:40:22,369] Trial 1045 pruned.
[I 2021-05-10 18:40:22,988] Trial 1046 pruned.
[I 2021-05-10 18:40:23,628] Trial 1047 pruned.
[I 2021-05-10 18:40:24,279] Trial 1048 pruned.
[I 2021-05-10 18:40:24,477] Trial 1049 pruned.
[I 2021-05-10 18:40:25,124] Trial 1050 pruned.
[I 2021-05-10 18:40:25,785] Trial 1051 pruned.
[I 2021-05-10 18:40:26,412] Trial 1052 pruned.
[I 2021-05-10 18:40:27,080] Trial 1053 pruned.
[I 2021-05-10 18:40:27,717] Trial 1054 pruned.
[I 2021-05-10 18:40:27,838] Trial 1055 pruned.
[I 2021-05-10 18:40:28,489] Trial 1056 pruned.
[I 2021-05-10 18:40:28,992] Trial 1057 pruned.
[I 2021-05-10 18:40:29,645] Trial 1058 pruned.
[I 2021-05-10 18:40:30,298] Trial 1059 pruned.
[I 2021-05-10 18:40:30,970] Trial 1060 pruned.
[I 2021-05-10 18:40:31,603] Trial 1061 pruned.
[I 2021-05-10 18:40:32,241] Trial 1062 pruned.
[I 2021-05-10 18:40:32,358] Trial 1063 pruned.
[I 2021-05-10 18:40:33,028] Trial 1064 pruned.
[I 2021-05-10 18:40:33,335] Trial 1065 pruned.
[I 2021-05-10 18:40:33,984] Trial 1066 pruned.
[I 2021-05-10 18:40:34,289] Trial 1067 pruned.
[I 2021-05-10 18:40:34,795] Trial 1068 pruned.
[I 2021-05-10 18:40:34,991] Trial 1069 pruned.
[I 2021-05-10 18:40:35,292] Trial 1070 pruned.
[I 2021-05-10 18:40:35,938] Trial 1071 pruned.
[I 2021-05-10 18:40:36,490] Trial 1072 pruned.
[I 2021-05-10 18:40:36,826] Trial 1073 pruned.
[I 2021-05-10 18:40:37,475] Trial 1074 pruned.
[I 2021-05-10 18:40:37,779] Trial 1075 pruned.
[I 2021-05-10 18:40:38,350] Trial 1076 pruned.
[I 2021-05-10 18:40:38,708] Trial 1077 pruned.
[I 2021-05-10 18:40:39,265] Trial 1078 pruned.
[I 2021-05-10 18:40:39,388] Trial 1079 pruned.
[I 2021-05-10 18:40:39,947] Trial 1080 pruned.
[I 2021-05-10 18:40:40,289] Trial 1081 pruned.
[I 2021-05-10 18:40:40,939] Trial 1082 pruned.
[I 2021-05-10 18:40:41,239] Trial 1083 pruned.
[I 2021-05-10 18:40:41,886] Trial 1084 pruned.
[I 2021-05-10 18:40:42,473] Trial 1085 pruned.
[I 2021-05-10 18:40:43,121] Trial 1086 pruned.
[I 2021-05-10 18:40:43,622] Trial 1087 pruned.
[I 2021-05-10 18:40:43,956] Trial 1088 pruned.
[I 2021-05-10 18:40:44,548] Trial 1089 pruned.
[I 2021-05-10 18:40:44,847] Trial 1090 pruned.
[I 2021-05-10 18:40:45,499] Trial 1091 pruned.
[I 2021-05-10 18:40:45,617] Trial 1092 pruned.
[I 2021-05-10 18:40:45,940] Trial 1093 pruned.
[I 2021-05-10 18:40:46,597] Trial 1094 pruned.
[I 2021-05-10 18:40:47,177] Trial 1095 pruned.
[I 2021-05-10 18:40:47,523] Trial 1096 pruned.
[I 2021-05-10 18:40:48,095] Trial 1097 pruned.
[I 2021-05-10 18:40:48,293] Trial 1098 pruned.
[I 2021-05-10 18:40:48,638] Trial 1099 pruned.
[I 2021-05-10 18:40:49,222] Trial 1100 pruned.
[I 2021-05-10 18:40:49,775] Trial 1101 pruned.
[I 2021-05-10 18:40:50,117] Trial 1102 pruned.
[I 2021-05-10 18:40:50,771] Trial 1103 pruned.
[I 2021-05-10 18:40:51,066] Trial 1104 pruned.
[I 2021-05-10 18:40:51,637] Trial 1105 pruned.
[I 2021-05-10 18:40:51,981] Trial 1106 pruned.
[I 2021-05-10 18:40:52,655] Trial 1107 pruned.
[I 2021-05-10 18:40:52,766] Trial 1108 pruned.
[I 2021-05-10 18:40:53,338] Trial 1109 pruned.
[I 2021-05-10 18:40:53,694] Trial 1110 pruned.
[I 2021-05-10 18:40:54,344] Trial 1111 pruned.
[I 2021-05-10 18:40:54,670] Trial 1112 pruned.
[I 2021-05-10 18:40:55,227] Trial 1113 pruned.
[I 2021-05-10 18:40:55,585] Trial 1114 pruned.
[I 2021-05-10 18:40:56,226] Trial 1115 pruned.
[I 2021-05-10 18:40:56,801] Trial 1116 pruned.
[I 2021-05-10 18:40:57,145] Trial 1117 pruned.
[I 2021-05-10 18:40:57,728] Trial 1118 pruned.
[I 2021-05-10 18:40:58,084] Trial 1119 pruned.
[I 2021-05-10 18:40:58,729] Trial 1120 pruned.
[I 2021-05-10 18:40:58,841] Trial 1121 pruned.
[I 2021-05-10 18:40:59,114] Trial 1122 pruned.
[I 2021-05-10 18:40:59,776] Trial 1123 pruned.
[I 2021-05-10 18:41:00,339] Trial 1124 pruned.
[I 2021-05-10 18:41:00,695] Trial 1125 pruned.
[I 2021-05-10 18:41:01,307] Trial 1126 pruned.
[I 2021-05-10 18:41:01,655] Trial 1127 pruned.
[I 2021-05-10 18:41:01,828] Trial 1128 pruned.
[I 2021-05-10 18:41:02,482] Trial 1129 pruned.
[I 2021-05-10 18:41:03,129] Trial 1130 pruned.
[I 2021-05-10 18:41:03,440] Trial 1131 pruned.
[I 2021-05-10 18:41:04,099] Trial 1132 pruned.
[I 2021-05-10 18:41:04,412] Trial 1133 pruned.
[I 2021-05-10 18:41:05,035] Trial 1134 pruned.
[I 2021-05-10 18:41:05,353] Trial 1135 pruned.
[I 2021-05-10 18:41:07,771] Trial 1136 pruned.
[I 2021-05-10 18:41:07,882] Trial 1137 pruned.
[I 2021-05-10 18:41:08,531] Trial 1138 pruned.
[I 2021-05-10 18:41:08,844] Trial 1139 pruned.
[I 2021-05-10 18:41:09,486] Trial 1140 pruned.
[I 2021-05-10 18:41:09,788] Trial 1141 pruned.
[I 2021-05-10 18:41:10,433] Trial 1142 pruned.
[I 2021-05-10 18:41:11,071] Trial 1143 pruned.
[I 2021-05-10 18:41:11,635] Trial 1144 pruned.
[I 2021-05-10 18:41:12,125] Trial 1145 pruned.
[I 2021-05-10 18:41:12,436] Trial 1146 pruned.
[I 2021-05-10 18:41:13,078] Trial 1147 pruned.
[I 2021-05-10 18:41:13,417] Trial 1148 pruned.
[I 2021-05-10 18:41:14,025] Trial 1149 pruned.
[I 2021-05-10 18:41:14,377] Trial 1150 pruned.
[I 2021-05-10 18:41:14,500] Trial 1151 pruned.
[I 2021-05-10 18:41:15,076] Trial 1152 pruned.
[I 2021-05-10 18:41:15,648] Trial 1153 pruned.
[I 2021-05-10 18:41:15,990] Trial 1154 pruned.
[I 2021-05-10 18:41:16,626] Trial 1155 pruned.
[I 2021-05-10 18:41:16,934] Trial 1156 pruned.
[I 2021-05-10 18:41:17,576] Trial 1157 pruned.
[I 2021-05-10 18:41:17,754] Trial 1158 pruned.
[I 2021-05-10 18:41:20,218] Trial 1159 pruned.
[I 2021-05-10 18:41:20,573] Trial 1160 pruned.
[I 2021-05-10 18:41:21,677] Trial 1161 pruned.
[I 2021-05-10 18:41:22,028] Trial 1162 pruned.
[I 2021-05-10 18:41:22,601] Trial 1163 pruned.
[I 2021-05-10 18:41:22,941] Trial 1164 pruned.
[I 2021-05-10 18:41:23,514] Trial 1165 pruned.
[I 2021-05-10 18:41:23,615] Trial 1166 pruned.
[I 2021-05-10 18:41:24,178] Trial 1167 pruned.
[I 2021-05-10 18:41:24,535] Trial 1168 pruned.
[I 2021-05-10 18:41:25,174] Trial 1169 pruned.
[I 2021-05-10 18:41:25,485] Trial 1170 pruned.
[I 2021-05-10 18:41:26,150] Trial 1171 pruned.
[I 2021-05-10 18:41:26,495] Trial 1172 pruned.
[I 2021-05-10 18:41:27,053] Trial 1173 pruned.
[I 2021-05-10 18:41:27,708] Trial 1174 pruned.
[I 2021-05-10 18:41:55,088] Trial 1175 finished with value: 181.92909240722656 and parameters: {'lr': 0.0029082278662772285, 'batch_size': 32, 'n_layers': 4, 'neurons_HL1': 848, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 18:41:55,650] Trial 1176 pruned.
[I 2021-05-10 18:41:56,006] Trial 1177 pruned.
[I 2021-05-10 18:41:56,654] Trial 1178 pruned.
[I 2021-05-10 18:41:56,956] Trial 1179 pruned.
[I 2021-05-10 18:41:57,596] Trial 1180 pruned.
[I 2021-05-10 18:41:57,707] Trial 1181 pruned.
[I 2021-05-10 18:41:58,343] Trial 1182 pruned.
[I 2021-05-10 18:41:58,687] Trial 1183 pruned.
[I 2021-05-10 18:41:59,264] Trial 1184 pruned.
[I 2021-05-10 18:41:59,574] Trial 1185 pruned.
[I 2021-05-10 18:42:00,208] Trial 1186 pruned.
[I 2021-05-10 18:42:00,357] Trial 1187 pruned.
[I 2021-05-10 18:42:00,998] Trial 1188 pruned.
[I 2021-05-10 18:42:01,311] Trial 1189 pruned.
[I 2021-05-10 18:42:01,986] Trial 1190 pruned.
[I 2021-05-10 18:42:02,302] Trial 1191 pruned.
[I 2021-05-10 18:42:02,967] Trial 1192 pruned.
[I 2021-05-10 18:42:03,295] Trial 1193 pruned.
[I 2021-05-10 18:43:03,473] Trial 1194 finished with value: 165.5704345703125 and parameters: {'lr': 0.001516030615270181, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 820, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 18:43:03,599] Trial 1195 pruned.
[I 2021-05-10 18:43:04,161] Trial 1196 pruned.
[I 2021-05-10 18:43:04,753] Trial 1197 pruned.
[I 2021-05-10 18:44:05,350] Trial 1198 finished with value: 167.19183349609375 and parameters: {'lr': 0.0029738650323694227, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 684, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 18:44:05,681] Trial 1199 pruned.
[I 2021-05-10 18:44:06,239] Trial 1200 pruned.
[I 2021-05-10 18:44:06,581] Trial 1201 pruned.
[I 2021-05-10 18:44:07,224] Trial 1202 pruned.
[I 2021-05-10 18:44:07,794] Trial 1203 pruned.
[I 2021-05-10 18:44:08,107] Trial 1204 pruned.
[I 2021-05-10 18:44:08,597] Trial 1205 pruned.
[I 2021-05-10 18:44:11,643] Trial 1206 pruned.
[I 2021-05-10 18:44:12,295] Trial 1207 pruned.
[I 2021-05-10 18:44:12,602] Trial 1208 pruned.
[I 2021-05-10 18:44:13,238] Trial 1209 pruned.
[I 2021-05-10 18:44:13,352] Trial 1210 pruned.
[I 2021-05-10 18:44:14,002] Trial 1211 pruned.
[I 2021-05-10 18:44:14,370] Trial 1212 pruned.
[I 2021-05-10 18:44:15,036] Trial 1213 pruned.
[I 2021-05-10 18:44:15,348] Trial 1214 pruned.
[I 2021-05-10 18:45:13,938] Trial 1215 finished with value: 165.24278259277344 and parameters: {'lr': 0.0018790645173003746, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 670, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 18:45:14,131] Trial 1216 pruned.
[I 2021-05-10 18:45:15,259] Trial 1217 pruned.
[I 2021-05-10 18:45:15,596] Trial 1218 pruned.
[I 2021-05-10 18:45:16,198] Trial 1219 pruned.
[I 2021-05-10 18:45:16,555] Trial 1220 pruned.
[I 2021-05-10 18:45:17,129] Trial 1221 pruned.
[I 2021-05-10 18:45:17,775] Trial 1222 pruned.
[I 2021-05-10 18:45:18,327] Trial 1223 pruned.
[I 2021-05-10 18:45:18,428] Trial 1224 pruned.
[I 2021-05-10 18:45:19,081] Trial 1225 pruned.
[I 2021-05-10 18:45:19,432] Trial 1226 pruned.
[I 2021-05-10 18:45:20,016] Trial 1227 pruned.
[I 2021-05-10 18:45:20,334] Trial 1228 pruned.
[I 2021-05-10 18:45:20,989] Trial 1229 pruned.
[I 2021-05-10 18:45:21,340] Trial 1230 pruned.
[I 2021-05-10 18:45:21,912] Trial 1231 pruned.
[I 2021-05-10 18:45:22,550] Trial 1232 pruned.
[I 2021-05-10 18:45:22,898] Trial 1233 pruned.
[I 2021-05-10 18:45:23,469] Trial 1234 pruned.
[I 2021-05-10 18:45:23,784] Trial 1235 pruned.
[I 2021-05-10 18:45:24,436] Trial 1236 pruned.
[I 2021-05-10 18:45:24,762] Trial 1237 pruned.
[I 2021-05-10 18:45:25,412] Trial 1238 pruned.
[I 2021-05-10 18:45:26,067] Trial 1239 pruned.
[I 2021-05-10 18:45:26,384] Trial 1240 pruned.
[I 2021-05-10 18:45:26,506] Trial 1241 pruned.
[I 2021-05-10 18:45:27,080] Trial 1242 pruned.
[I 2021-05-10 18:45:27,433] Trial 1243 pruned.
[I 2021-05-10 18:45:27,998] Trial 1244 pruned.
[I 2021-05-10 18:45:28,635] Trial 1245 pruned.
[I 2021-05-10 18:45:28,907] Trial 1246 pruned.
[I 2021-05-10 18:45:29,087] Trial 1247 pruned.
[I 2021-05-10 18:45:29,744] Trial 1248 pruned.
[I 2021-05-10 18:45:30,063] Trial 1249 pruned.
[I 2021-05-10 18:45:30,699] Trial 1250 pruned.
[I 2021-05-10 18:45:30,999] Trial 1251 pruned.
[I 2021-05-10 18:45:31,625] Trial 1252 pruned.
[I 2021-05-10 18:45:31,742] Trial 1253 pruned.
[I 2021-05-10 18:45:32,270] Trial 1254 pruned.
[I 2021-05-10 18:45:32,604] Trial 1255 pruned.
[I 2021-05-10 18:45:33,158] Trial 1256 pruned.
[I 2021-05-10 18:45:33,505] Trial 1257 pruned.
[I 2021-05-10 18:45:34,122] Trial 1258 pruned.
[I 2021-05-10 18:45:34,430] Trial 1259 pruned.
[I 2021-05-10 18:45:35,060] Trial 1260 pruned.
[I 2021-05-10 18:45:35,634] Trial 1261 pruned.
[I 2021-05-10 18:45:35,903] Trial 1262 pruned.
[I 2021-05-10 18:45:36,529] Trial 1263 pruned.
[I 2021-05-10 18:45:36,828] Trial 1264 pruned.
[I 2021-05-10 18:45:37,441] Trial 1265 pruned.
[I 2021-05-10 18:45:37,745] Trial 1266 pruned.
[I 2021-05-10 18:45:38,384] Trial 1267 pruned.
[I 2021-05-10 18:45:38,514] Trial 1268 pruned.
[I 2021-05-10 18:45:39,128] Trial 1269 pruned.
[I 2021-05-10 18:45:39,490] Trial 1270 pruned.
[I 2021-05-10 18:45:40,101] Trial 1271 pruned.
[I 2021-05-10 18:45:40,454] Trial 1272 pruned.
[I 2021-05-10 18:45:41,032] Trial 1273 pruned.
[I 2021-05-10 18:45:41,248] Trial 1274 pruned.
[I 2021-05-10 18:45:41,839] Trial 1275 pruned.
[I 2021-05-10 18:45:42,205] Trial 1276 pruned.
[I 2021-05-10 18:45:42,760] Trial 1277 pruned.
[I 2021-05-10 18:45:43,117] Trial 1278 pruned.
[I 2021-05-10 18:45:43,696] Trial 1279 pruned.
[I 2021-05-10 18:45:44,053] Trial 1280 pruned.
[I 2021-05-10 18:45:44,691] Trial 1281 pruned.
[I 2021-05-10 18:45:44,805] Trial 1282 pruned.
[I 2021-05-10 18:45:45,445] Trial 1283 pruned.
[I 2021-05-10 18:45:45,759] Trial 1284 pruned.
[I 2021-05-10 18:45:46,260] Trial 1285 pruned.
[I 2021-05-10 18:45:46,619] Trial 1286 pruned.
[I 2021-05-10 18:45:47,180] Trial 1287 pruned.
[I 2021-05-10 18:46:17,857] Trial 1288 finished with value: 163.59222412109375 and parameters: {'lr': 0.0018421708855279688, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 720, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 18:46:18,218] Trial 1289 pruned.
[I 2021-05-10 18:46:18,569] Trial 1290 pruned.
[I 2021-05-10 18:46:18,880] Trial 1291 pruned.
[I 2021-05-10 18:46:19,196] Trial 1292 pruned.
[I 2021-05-10 18:46:19,550] Trial 1293 pruned.
[I 2021-05-10 18:46:19,904] Trial 1294 pruned.
[I 2021-05-10 18:46:20,216] Trial 1295 pruned.
[I 2021-05-10 18:46:20,569] Trial 1296 pruned.
[I 2021-05-10 18:46:20,880] Trial 1297 pruned.
[I 2021-05-10 18:46:21,232] Trial 1298 pruned.
[I 2021-05-10 18:46:21,544] Trial 1299 pruned.
[I 2021-05-10 18:46:21,896] Trial 1300 pruned.
[I 2021-05-10 18:46:22,213] Trial 1301 pruned.
[I 2021-05-10 18:46:22,578] Trial 1302 pruned.
[I 2021-05-10 18:46:22,928] Trial 1303 pruned.
[I 2021-05-10 18:46:23,203] Trial 1304 pruned.
[I 2021-05-10 18:46:23,514] Trial 1305 pruned.
[I 2021-05-10 18:46:23,869] Trial 1306 pruned.
[I 2021-05-10 18:46:24,180] Trial 1307 pruned.
[I 2021-05-10 18:46:24,537] Trial 1308 pruned.
[I 2021-05-10 18:46:24,851] Trial 1309 pruned.
[I 2021-05-10 18:46:25,162] Trial 1310 pruned.
[I 2021-05-10 18:46:25,514] Trial 1311 pruned.
[I 2021-05-10 18:46:25,860] Trial 1312 pruned.
[I 2021-05-10 18:46:26,223] Trial 1313 pruned.
[I 2021-05-10 18:46:26,528] Trial 1314 pruned.
[I 2021-05-10 18:46:26,843] Trial 1315 pruned.
[I 2021-05-10 18:46:26,971] Trial 1316 pruned.
[I 2021-05-10 18:46:27,318] Trial 1317 pruned.
[I 2021-05-10 18:46:27,630] Trial 1318 pruned.
[I 2021-05-10 18:46:27,813] Trial 1319 pruned.
[I 2021-05-10 18:46:28,173] Trial 1320 pruned.
[I 2021-05-10 18:46:59,231] Trial 1321 finished with value: 160.38412475585938 and parameters: {'lr': 0.0024773804467026726, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 712, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 18:46:59,589] Trial 1322 pruned.
[I 2021-05-10 18:46:59,944] Trial 1323 pruned.
[I 2021-05-10 18:47:00,301] Trial 1324 pruned.
[I 2021-05-10 18:47:00,985] Trial 1325 pruned.
[I 2021-05-10 18:47:01,357] Trial 1326 pruned.
[I 2021-05-10 18:47:01,711] Trial 1327 pruned.
[I 2021-05-10 18:47:02,066] Trial 1328 pruned.
[I 2021-05-10 18:47:02,426] Trial 1329 pruned.
[I 2021-05-10 18:47:02,787] Trial 1330 pruned.
[I 2021-05-10 18:47:03,141] Trial 1331 pruned.
[I 2021-05-10 18:47:03,493] Trial 1332 pruned.
[I 2021-05-10 18:47:03,848] Trial 1333 pruned.
[I 2021-05-10 18:47:04,205] Trial 1334 pruned.
[I 2021-05-10 18:47:04,566] Trial 1335 pruned.
[I 2021-05-10 18:47:04,920] Trial 1336 pruned.
[I 2021-05-10 18:47:05,268] Trial 1337 pruned.
[I 2021-05-10 18:47:05,623] Trial 1338 pruned.
[I 2021-05-10 18:47:05,979] Trial 1339 pruned.
[I 2021-05-10 18:47:06,334] Trial 1340 pruned.
[I 2021-05-10 18:47:06,686] Trial 1341 pruned.
[I 2021-05-10 18:47:07,041] Trial 1342 pruned.
[I 2021-05-10 18:47:07,391] Trial 1343 pruned.
[I 2021-05-10 18:47:07,746] Trial 1344 pruned.
[I 2021-05-10 18:47:08,114] Trial 1345 pruned.
[I 2021-05-10 18:47:08,248] Trial 1346 pruned.
[I 2021-05-10 18:47:08,614] Trial 1347 pruned.
[I 2021-05-10 18:47:08,978] Trial 1348 pruned.
[I 2021-05-10 18:47:09,338] Trial 1349 pruned.
[I 2021-05-10 18:47:09,674] Trial 1350 pruned.
[I 2021-05-10 18:47:10,029] Trial 1351 pruned.
[I 2021-05-10 18:47:10,387] Trial 1352 pruned.
[I 2021-05-10 18:47:10,518] Trial 1353 pruned.
[I 2021-05-10 18:47:10,877] Trial 1354 pruned.
[I 2021-05-10 18:47:11,189] Trial 1355 pruned.
[I 2021-05-10 18:47:11,541] Trial 1356 pruned.
[I 2021-05-10 18:47:11,852] Trial 1357 pruned.
[I 2021-05-10 18:47:12,038] Trial 1358 pruned.
[I 2021-05-10 18:47:12,384] Trial 1359 pruned.
[I 2021-05-10 18:47:12,748] Trial 1360 pruned.
[I 2021-05-10 18:47:13,074] Trial 1361 pruned.
[I 2021-05-10 18:47:13,228] Trial 1362 pruned.
[I 2021-05-10 18:47:13,576] Trial 1363 pruned.
[I 2021-05-10 18:47:13,882] Trial 1364 pruned.
[I 2021-05-10 18:47:14,238] Trial 1365 pruned.
[I 2021-05-10 18:47:14,557] Trial 1366 pruned.
[I 2021-05-10 18:47:14,916] Trial 1367 pruned.
[I 2021-05-10 18:47:15,229] Trial 1368 pruned.
[I 2021-05-10 18:47:15,367] Trial 1369 pruned.
[I 2021-05-10 18:47:15,761] Trial 1370 pruned.
[I 2021-05-10 18:47:16,101] Trial 1371 pruned.
[I 2021-05-10 18:47:16,407] Trial 1372 pruned.
[I 2021-05-10 18:47:16,772] Trial 1373 pruned.
[I 2021-05-10 18:47:16,989] Trial 1374 pruned.
[I 2021-05-10 18:47:17,296] Trial 1375 pruned.
[I 2021-05-10 18:47:17,639] Trial 1376 pruned.
[I 2021-05-10 18:47:17,969] Trial 1377 pruned.
[I 2021-05-10 18:47:18,376] Trial 1378 pruned.
[I 2021-05-10 18:47:18,503] Trial 1379 pruned.
[I 2021-05-10 18:47:18,875] Trial 1380 pruned.
[I 2021-05-10 18:47:19,234] Trial 1381 pruned.
[I 2021-05-10 18:47:19,609] Trial 1382 pruned.
[I 2021-05-10 18:47:19,934] Trial 1383 pruned.
[I 2021-05-10 18:47:20,312] Trial 1384 pruned.
[I 2021-05-10 18:47:21,022] Trial 1385 pruned.
[I 2021-05-10 18:47:21,157] Trial 1386 pruned.
[I 2021-05-10 18:47:21,489] Trial 1387 pruned.
[I 2021-05-10 18:47:22,106] Trial 1388 pruned.
[I 2021-05-10 18:47:22,465] Trial 1389 pruned.
[I 2021-05-10 18:47:24,053] Trial 1390 pruned.
[I 2021-05-10 18:47:24,351] Trial 1391 pruned.
[I 2021-05-10 18:47:24,961] Trial 1392 pruned.
[I 2021-05-10 18:47:25,302] Trial 1393 pruned.
[I 2021-05-10 18:47:25,848] Trial 1394 pruned.
[I 2021-05-10 18:47:26,179] Trial 1395 pruned.
[I 2021-05-10 18:47:26,743] Trial 1396 pruned.
[I 2021-05-10 18:47:27,069] Trial 1397 pruned.
[I 2021-05-10 18:47:27,414] Trial 1398 pruned.
[I 2021-05-10 18:47:27,971] Trial 1399 pruned.
[I 2021-05-10 18:47:28,308] Trial 1400 pruned.
[I 2021-05-10 18:47:28,850] Trial 1401 pruned.
[I 2021-05-10 18:47:29,191] Trial 1402 pruned.
[I 2021-05-10 18:48:27,077] Trial 1403 finished with value: 163.8732452392578 and parameters: {'lr': 0.0015374297927883583, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 716, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 18:48:27,202] Trial 1404 pruned.
[I 2021-05-10 18:48:27,827] Trial 1405 pruned.
[I 2021-05-10 18:48:28,160] Trial 1406 pruned.
[I 2021-05-10 18:48:28,810] Trial 1407 pruned.
[I 2021-05-10 18:48:29,149] Trial 1408 pruned.
[I 2021-05-10 18:48:29,785] Trial 1409 pruned.
[I 2021-05-10 18:48:30,116] Trial 1410 pruned.
[I 2021-05-10 18:48:30,746] Trial 1411 pruned.
[I 2021-05-10 18:48:31,079] Trial 1412 pruned.
[I 2021-05-10 18:49:29,454] Trial 1413 finished with value: 161.0049591064453 and parameters: {'lr': 0.0016277307586588766, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 680, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 18:49:30,080] Trial 1414 pruned.
[I 2021-05-10 18:49:30,415] Trial 1415 pruned.
[I 2021-05-10 18:49:31,045] Trial 1416 pruned.
[I 2021-05-10 18:49:31,382] Trial 1417 pruned.
[I 2021-05-10 18:49:32,004] Trial 1418 pruned.
[I 2021-05-10 18:49:32,340] Trial 1419 pruned.
[I 2021-05-10 18:49:32,463] Trial 1420 pruned.
[I 2021-05-10 18:49:33,095] Trial 1421 pruned.
[I 2021-05-10 18:49:33,432] Trial 1422 pruned.
[I 2021-05-10 18:49:34,059] Trial 1423 pruned.
[I 2021-05-10 18:49:34,395] Trial 1424 pruned.
[I 2021-05-10 18:49:34,591] Trial 1425 pruned.
[I 2021-05-10 18:49:35,814] Trial 1426 pruned.
[I 2021-05-10 18:49:36,156] Trial 1427 pruned.
[I 2021-05-10 18:49:36,783] Trial 1428 pruned.
[I 2021-05-10 18:49:37,114] Trial 1429 pruned.
[I 2021-05-10 18:49:37,752] Trial 1430 pruned.
[I 2021-05-10 18:49:38,091] Trial 1431 pruned.
[I 2021-05-10 18:49:38,221] Trial 1432 pruned.
[I 2021-05-10 18:49:38,847] Trial 1433 pruned.
[I 2021-05-10 18:49:39,188] Trial 1434 pruned.
[I 2021-05-10 18:49:39,795] Trial 1435 pruned.
[I 2021-05-10 18:49:40,133] Trial 1436 pruned.
[I 2021-05-10 18:49:40,756] Trial 1437 pruned.
[I 2021-05-10 18:49:41,102] Trial 1438 pruned.
[I 2021-05-10 18:49:41,715] Trial 1439 pruned.
[I 2021-05-10 18:49:42,059] Trial 1440 pruned.
[I 2021-05-10 18:49:42,668] Trial 1441 pruned.
[I 2021-05-10 18:49:43,013] Trial 1442 pruned.
[I 2021-05-10 18:49:43,647] Trial 1443 pruned.
[I 2021-05-10 18:49:43,991] Trial 1444 pruned.
[I 2021-05-10 18:49:44,594] Trial 1445 pruned.
[I 2021-05-10 18:49:44,924] Trial 1446 pruned.
[I 2021-05-10 18:49:45,048] Trial 1447 pruned.
[I 2021-05-10 18:49:45,673] Trial 1448 pruned.
[I 2021-05-10 18:49:46,008] Trial 1449 pruned.
[I 2021-05-10 18:49:46,639] Trial 1450 pruned.
[I 2021-05-10 18:49:46,975] Trial 1451 pruned.
[I 2021-05-10 18:49:47,596] Trial 1452 pruned.
[I 2021-05-10 18:49:47,939] Trial 1453 pruned.
[I 2021-05-10 18:49:48,136] Trial 1454 pruned.
[I 2021-05-10 18:49:48,753] Trial 1455 pruned.
[I 2021-05-10 18:49:49,092] Trial 1456 pruned.
[I 2021-05-10 18:49:49,718] Trial 1457 pruned.
[I 2021-05-10 18:49:50,062] Trial 1458 pruned.
[I 2021-05-10 18:49:50,687] Trial 1459 pruned.
[I 2021-05-10 18:49:51,013] Trial 1460 pruned.
[I 2021-05-10 18:49:51,646] Trial 1461 pruned.
[I 2021-05-10 18:49:51,773] Trial 1462 pruned.
[I 2021-05-10 18:49:52,112] Trial 1463 pruned.
[I 2021-05-10 18:49:52,727] Trial 1464 pruned.
[I 2021-05-10 18:49:53,074] Trial 1465 pruned.
[I 2021-05-10 18:49:53,698] Trial 1466 pruned.
[I 2021-05-10 18:49:54,044] Trial 1467 pruned.
[I 2021-05-10 18:49:54,660] Trial 1468 pruned.
[I 2021-05-10 18:49:54,995] Trial 1469 pruned.
[I 2021-05-10 18:49:55,614] Trial 1470 pruned.
[I 2021-05-10 18:49:55,955] Trial 1471 pruned.
[I 2021-05-10 18:49:56,508] Trial 1472 pruned.
[I 2021-05-10 18:49:56,847] Trial 1473 pruned.
[I 2021-05-10 18:49:57,474] Trial 1474 pruned.
[I 2021-05-10 18:49:57,775] Trial 1475 pruned.
[I 2021-05-10 18:49:58,407] Trial 1476 pruned.
[I 2021-05-10 18:49:58,521] Trial 1477 pruned.
[I 2021-05-10 18:49:58,859] Trial 1478 pruned.
[I 2021-05-10 18:49:59,488] Trial 1479 pruned.
[I 2021-05-10 18:49:59,790] Trial 1480 pruned.
[I 2021-05-10 18:50:00,415] Trial 1481 pruned.
[I 2021-05-10 18:50:00,712] Trial 1482 pruned.
[I 2021-05-10 18:50:00,909] Trial 1483 pruned.
[I 2021-05-10 18:50:09,201] Trial 1484 pruned.
[I 2021-05-10 18:50:09,511] Trial 1485 pruned.
[I 2021-05-10 18:50:10,142] Trial 1486 pruned.
[I 2021-05-10 18:50:10,479] Trial 1487 pruned.
[I 2021-05-10 18:50:11,045] Trial 1488 pruned.
[I 2021-05-10 18:50:11,389] Trial 1489 pruned.
[I 2021-05-10 18:50:18,659] Trial 1490 pruned.
[I 2021-05-10 18:50:18,783] Trial 1491 pruned.
[I 2021-05-10 18:50:19,081] Trial 1492 pruned.
[I 2021-05-10 18:50:19,709] Trial 1493 pruned.
[I 2021-05-10 18:50:20,049] Trial 1494 pruned.
[I 2021-05-10 18:50:20,606] Trial 1495 pruned.
[I 2021-05-10 18:50:20,939] Trial 1496 pruned.
[I 2021-05-10 18:50:21,575] Trial 1497 pruned.
[I 2021-05-10 18:50:21,918] Trial 1498 pruned.
[I 2021-05-10 18:50:22,460] Trial 1499 pruned.
[I 2021-05-10 18:50:22,794] Trial 1500 pruned.
[I 2021-05-10 18:50:23,353] Trial 1501 pruned.
[I 2021-05-10 18:50:23,692] Trial 1502 pruned.
[I 2021-05-10 18:50:24,324] Trial 1503 pruned.
[I 2021-05-10 18:50:24,630] Trial 1504 pruned.
[I 2021-05-10 18:50:24,755] Trial 1505 pruned.
[I 2021-05-10 18:50:25,308] Trial 1506 pruned.
[I 2021-05-10 18:50:25,650] Trial 1507 pruned.
[I 2021-05-10 18:50:26,204] Trial 1508 pruned.
[I 2021-05-10 18:50:26,542] Trial 1509 pruned.
[I 2021-05-10 18:50:27,176] Trial 1510 pruned.
[I 2021-05-10 18:50:27,479] Trial 1511 pruned.
[I 2021-05-10 18:50:27,677] Trial 1512 pruned.
[I 2021-05-10 18:50:28,231] Trial 1513 pruned.
[I 2021-05-10 18:50:28,563] Trial 1514 pruned.
[I 2021-05-10 18:50:29,199] Trial 1515 pruned.
[I 2021-05-10 18:50:29,506] Trial 1516 pruned.
[I 2021-05-10 18:50:30,130] Trial 1517 pruned.
[I 2021-05-10 18:50:30,472] Trial 1518 pruned.
[I 2021-05-10 18:50:30,588] Trial 1519 pruned.
[I 2021-05-10 18:50:38,634] Trial 1520 pruned.
[I 2021-05-10 18:50:38,936] Trial 1521 pruned.
[I 2021-05-10 18:50:40,742] Trial 1522 pruned.
[I 2021-05-10 18:50:41,068] Trial 1523 pruned.
[I 2021-05-10 18:50:41,630] Trial 1524 pruned.
[I 2021-05-10 18:50:41,974] Trial 1525 pruned.
[I 2021-05-10 18:50:42,606] Trial 1526 pruned.
[I 2021-05-10 18:50:42,909] Trial 1527 pruned.
[I 2021-05-10 18:50:43,541] Trial 1528 pruned.
[I 2021-05-10 18:50:43,838] Trial 1529 pruned.
[I 2021-05-10 18:50:46,190] Trial 1530 pruned.
[I 2021-05-10 18:50:46,529] Trial 1531 pruned.
[I 2021-05-10 18:51:37,300] Trial 1532 finished with value: 165.73143005371094 and parameters: {'lr': 0.0021159204615090444, 'batch_size': 16, 'n_layers': 4, 'neurons_HL1': 970, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 18:51:37,632] Trial 1533 pruned.
[I 2021-05-10 18:51:37,748] Trial 1534 pruned.
[I 2021-05-10 18:51:38,362] Trial 1535 pruned.
[I 2021-05-10 18:51:38,668] Trial 1536 pruned.
[I 2021-05-10 18:51:39,287] Trial 1537 pruned.
[I 2021-05-10 18:51:39,628] Trial 1538 pruned.
[I 2021-05-10 18:51:40,179] Trial 1539 pruned.
[I 2021-05-10 18:51:40,525] Trial 1540 pruned.
[I 2021-05-10 18:51:40,722] Trial 1541 pruned.
[I 2021-05-10 18:51:41,283] Trial 1542 pruned.
[I 2021-05-10 18:51:41,626] Trial 1543 pruned.
[I 2021-05-10 18:51:42,256] Trial 1544 pruned.
[I 2021-05-10 18:51:42,559] Trial 1545 pruned.
[I 2021-05-10 18:51:43,188] Trial 1546 pruned.
[I 2021-05-10 18:51:43,503] Trial 1547 pruned.
[I 2021-05-10 18:51:44,129] Trial 1548 pruned.
[I 2021-05-10 18:51:44,260] Trial 1549 pruned.
[I 2021-05-10 18:51:44,549] Trial 1550 pruned.
[I 2021-05-10 18:51:45,182] Trial 1551 pruned.
[I 2021-05-10 18:51:45,488] Trial 1552 pruned.
[I 2021-05-10 18:51:46,120] Trial 1553 pruned.
[I 2021-05-10 18:51:46,427] Trial 1554 pruned.
[I 2021-05-10 18:51:47,052] Trial 1555 pruned.
[I 2021-05-10 18:51:47,386] Trial 1556 pruned.
[I 2021-05-10 18:51:48,001] Trial 1557 pruned.
[I 2021-05-10 18:51:48,322] Trial 1558 pruned.
[I 2021-05-10 18:51:49,511] Trial 1559 pruned.
[I 2021-05-10 18:51:49,819] Trial 1560 pruned.
[I 2021-05-10 18:51:50,444] Trial 1561 pruned.
[I 2021-05-10 18:51:50,786] Trial 1562 pruned.
[I 2021-05-10 18:51:50,900] Trial 1563 pruned.
[I 2021-05-10 18:51:51,523] Trial 1564 pruned.
[I 2021-05-10 18:51:51,866] Trial 1565 pruned.
[I 2021-05-10 18:51:52,422] Trial 1566 pruned.
[I 2021-05-10 18:51:52,770] Trial 1567 pruned.
[I 2021-05-10 18:51:53,315] Trial 1568 pruned.
[I 2021-05-10 18:51:53,658] Trial 1569 pruned.
[I 2021-05-10 18:51:53,836] Trial 1570 pruned.
[I 2021-05-10 18:51:54,464] Trial 1571 pruned.
[I 2021-05-10 18:51:54,770] Trial 1572 pruned.
[I 2021-05-10 18:51:55,414] Trial 1573 pruned.
[I 2021-05-10 18:51:55,760] Trial 1574 pruned.
[I 2021-05-10 18:51:56,393] Trial 1575 pruned.
[I 2021-05-10 18:51:56,697] Trial 1576 pruned.
[I 2021-05-10 18:51:56,826] Trial 1577 pruned.
[I 2021-05-10 18:51:57,388] Trial 1578 pruned.
[I 2021-05-10 18:51:57,734] Trial 1579 pruned.
[I 2021-05-10 18:52:00,067] Trial 1580 pruned.
[I 2021-05-10 18:52:00,375] Trial 1581 pruned.
[I 2021-05-10 18:52:03,913] Trial 1582 pruned.
[I 2021-05-10 18:52:04,237] Trial 1583 pruned.
[I 2021-05-10 18:52:04,858] Trial 1584 pruned.
[I 2021-05-10 18:52:05,163] Trial 1585 pruned.
[I 2021-05-10 18:52:05,778] Trial 1586 pruned.
[I 2021-05-10 18:52:06,114] Trial 1587 pruned.
[I 2021-05-10 18:52:06,752] Trial 1588 pruned.
[I 2021-05-10 18:52:07,056] Trial 1589 pruned.
[I 2021-05-10 18:52:07,614] Trial 1590 pruned.
[I 2021-05-10 18:52:07,954] Trial 1591 pruned.
[I 2021-05-10 18:52:08,080] Trial 1592 pruned.
[I 2021-05-10 18:52:08,636] Trial 1593 pruned.
[I 2021-05-10 18:52:08,990] Trial 1594 pruned.
[I 2021-05-10 18:52:09,610] Trial 1595 pruned.
[I 2021-05-10 18:52:09,919] Trial 1596 pruned.
[I 2021-05-10 18:52:10,552] Trial 1597 pruned.
[I 2021-05-10 18:52:10,857] Trial 1598 pruned.
[I 2021-05-10 18:52:11,474] Trial 1599 pruned.
[I 2021-05-10 18:52:11,672] Trial 1600 pruned.
[I 2021-05-10 18:52:12,023] Trial 1601 pruned.
[I 2021-05-10 18:52:12,587] Trial 1602 pruned.
[I 2021-05-10 18:52:12,928] Trial 1603 pruned.
[I 2021-05-10 18:52:14,124] Trial 1604 pruned.
[I 2021-05-10 18:52:14,432] Trial 1605 pruned.
[I 2021-05-10 18:52:14,984] Trial 1606 pruned.
[I 2021-05-10 18:52:15,113] Trial 1607 pruned.
[I 2021-05-10 18:52:15,417] Trial 1608 pruned.
[I 2021-05-10 18:52:16,042] Trial 1609 pruned.
[I 2021-05-10 18:52:16,380] Trial 1610 pruned.
[I 2021-05-10 18:52:17,015] Trial 1611 pruned.
[I 2021-05-10 18:52:17,331] Trial 1612 pruned.
[I 2021-05-10 18:52:17,954] Trial 1613 pruned.
[I 2021-05-10 18:52:18,265] Trial 1614 pruned.
[I 2021-05-10 18:52:19,497] Trial 1615 pruned.
[I 2021-05-10 18:52:19,839] Trial 1616 pruned.
[I 2021-05-10 18:52:20,394] Trial 1617 pruned.
[I 2021-05-10 18:52:20,735] Trial 1618 pruned.
[I 2021-05-10 18:52:21,305] Trial 1619 pruned.
[I 2021-05-10 18:52:21,647] Trial 1620 pruned.
[I 2021-05-10 18:52:22,277] Trial 1621 pruned.
[I 2021-05-10 18:52:22,393] Trial 1622 pruned.
[I 2021-05-10 18:52:22,731] Trial 1623 pruned.
[I 2021-05-10 18:52:23,294] Trial 1624 pruned.
[I 2021-05-10 18:52:23,640] Trial 1625 pruned.
[I 2021-05-10 18:52:24,272] Trial 1626 pruned.
[I 2021-05-10 18:52:24,614] Trial 1627 pruned.
[I 2021-05-10 18:52:25,173] Trial 1628 pruned.
[I 2021-05-10 18:52:25,479] Trial 1629 pruned.
[I 2021-05-10 18:52:25,687] Trial 1630 pruned.
[I 2021-05-10 18:52:26,319] Trial 1631 pruned.
[I 2021-05-10 18:52:26,633] Trial 1632 pruned.
[I 2021-05-10 18:52:27,271] Trial 1633 pruned.
[I 2021-05-10 18:52:27,616] Trial 1634 pruned.
[I 2021-05-10 18:52:28,176] Trial 1635 pruned.
[I 2021-05-10 18:52:28,313] Trial 1636 pruned.
[I 2021-05-10 18:52:28,660] Trial 1637 pruned.
[I 2021-05-10 18:52:29,204] Trial 1638 pruned.
[I 2021-05-10 18:52:29,516] Trial 1639 pruned.
[I 2021-05-10 18:52:30,121] Trial 1640 pruned.
[I 2021-05-10 18:52:30,453] Trial 1641 pruned.
[I 2021-05-10 18:52:31,091] Trial 1642 pruned.
[I 2021-05-10 18:52:31,394] Trial 1643 pruned.
[I 2021-05-10 18:52:32,034] Trial 1644 pruned.
[I 2021-05-10 18:52:32,347] Trial 1645 pruned.
[I 2021-05-10 18:52:32,977] Trial 1646 pruned.
[I 2021-05-10 18:52:33,324] Trial 1647 pruned.
[I 2021-05-10 18:52:33,910] Trial 1648 pruned.
[I 2021-05-10 18:52:34,244] Trial 1649 pruned.
[I 2021-05-10 18:52:34,375] Trial 1650 pruned.
[I 2021-05-10 18:52:34,934] Trial 1651 pruned.
[I 2021-05-10 18:52:35,274] Trial 1652 pruned.
[I 2021-05-10 18:52:35,830] Trial 1653 pruned.
[I 2021-05-10 18:52:36,174] Trial 1654 pruned.
[I 2021-05-10 18:52:36,752] Trial 1655 pruned.
[I 2021-05-10 18:53:06,323] Trial 1656 finished with value: 160.28424072265625 and parameters: {'lr': 0.002964041922091379, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 978, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 18:53:35,798] Trial 1657 finished with value: 160.0515899658203 and parameters: {'lr': 0.0032936111258550092, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 1000, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 18:53:36,145] Trial 1658 pruned.
[I 2021-05-10 18:53:40,278] Trial 1659 pruned.
[I 2021-05-10 18:53:40,625] Trial 1660 pruned.
[I 2021-05-10 18:53:40,833] Trial 1661 pruned.
[I 2021-05-10 18:53:41,469] Trial 1662 pruned.
[I 2021-05-10 18:53:41,823] Trial 1663 pruned.
[I 2021-05-10 18:53:42,424] Trial 1664 pruned.
[I 2021-05-10 18:53:42,772] Trial 1665 pruned.
[I 2021-05-10 18:53:43,975] Trial 1666 pruned.
[I 2021-05-10 18:53:44,110] Trial 1667 pruned.
[I 2021-05-10 18:53:44,458] Trial 1668 pruned.
[I 2021-05-10 18:53:45,078] Trial 1669 pruned.
[I 2021-05-10 18:53:45,425] Trial 1670 pruned.
[I 2021-05-10 18:53:46,072] Trial 1671 pruned.
[I 2021-05-10 18:53:46,426] Trial 1672 pruned.
[I 2021-05-10 18:53:47,055] Trial 1673 pruned.
[I 2021-05-10 18:53:47,403] Trial 1674 pruned.
[I 2021-05-10 18:53:48,045] Trial 1675 pruned.
[I 2021-05-10 18:53:48,395] Trial 1676 pruned.
[I 2021-05-10 18:53:49,025] Trial 1677 pruned.
[I 2021-05-10 18:53:49,373] Trial 1678 pruned.
[I 2021-05-10 18:53:49,971] Trial 1679 pruned.
[I 2021-05-10 18:53:50,319] Trial 1680 pruned.
[I 2021-05-10 18:53:50,958] Trial 1681 pruned.
[I 2021-05-10 18:53:51,091] Trial 1682 pruned.
[I 2021-05-10 18:53:51,440] Trial 1683 pruned.
[I 2021-05-10 18:53:52,086] Trial 1684 pruned.
[I 2021-05-10 18:53:52,435] Trial 1685 pruned.
[I 2021-05-10 18:53:53,070] Trial 1686 pruned.
[I 2021-05-10 18:53:53,419] Trial 1687 pruned.
[I 2021-05-10 18:53:54,066] Trial 1688 pruned.
[I 2021-05-10 18:53:55,599] Trial 1689 pruned.
[I 2021-05-10 18:53:56,229] Trial 1690 pruned.
[I 2021-05-10 18:53:56,434] Trial 1691 pruned.
[I 2021-05-10 18:54:26,648] Trial 1692 finished with value: 159.63648986816406 and parameters: {'lr': 0.0028634783346392933, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 1022, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 18:54:26,996] Trial 1693 pruned.
[I 2021-05-10 18:54:27,636] Trial 1694 pruned.
[I 2021-05-10 18:54:28,283] Trial 1695 pruned.
[I 2021-05-10 18:54:28,922] Trial 1696 pruned.
[I 2021-05-10 18:54:29,272] Trial 1697 pruned.
[I 2021-05-10 18:54:29,411] Trial 1698 pruned.
[I 2021-05-10 18:54:30,050] Trial 1699 pruned.
[I 2021-05-10 18:54:30,395] Trial 1700 pruned.
[I 2021-05-10 18:54:31,025] Trial 1701 pruned.
[I 2021-05-10 18:54:31,369] Trial 1702 pruned.
[I 2021-05-10 18:54:32,594] Trial 1703 pruned.
[I 2021-05-10 18:54:32,946] Trial 1704 pruned.
[I 2021-05-10 18:54:33,580] Trial 1705 pruned.
[I 2021-05-10 18:54:33,941] Trial 1706 pruned.
[I 2021-05-10 18:54:34,585] Trial 1707 pruned.
[I 2021-05-10 18:55:04,508] Trial 1708 finished with value: 175.1967010498047 and parameters: {'lr': 0.0031957401492412525, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 982, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 18:55:05,175] Trial 1709 pruned.
[I 2021-05-10 18:55:05,545] Trial 1710 pruned.
[I 2021-05-10 18:55:06,184] Trial 1711 pruned.
[I 2021-05-10 18:55:06,535] Trial 1712 pruned.
[I 2021-05-10 18:55:06,664] Trial 1713 pruned.
[I 2021-05-10 18:56:04,918] Trial 1714 finished with value: 161.48858642578125 and parameters: {'lr': 0.003653542937403679, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 982, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 18:56:06,126] Trial 1715 pruned.
[I 2021-05-10 18:56:06,770] Trial 1716 pruned.
[I 2021-05-10 18:56:07,399] Trial 1717 pruned.
[I 2021-05-10 18:56:08,030] Trial 1718 pruned.
[I 2021-05-10 18:56:09,236] Trial 1719 pruned.
[I 2021-05-10 18:56:09,871] Trial 1720 pruned.
[I 2021-05-10 18:56:10,506] Trial 1721 pruned.
[I 2021-05-10 18:56:11,142] Trial 1722 pruned.
[I 2021-05-10 18:56:11,778] Trial 1723 pruned.
[I 2021-05-10 18:56:12,408] Trial 1724 pruned.
[I 2021-05-10 18:56:13,040] Trial 1725 pruned.
[I 2021-05-10 18:56:13,668] Trial 1726 pruned.
[I 2021-05-10 18:56:14,308] Trial 1727 pruned.
[I 2021-05-10 18:56:14,941] Trial 1728 pruned.
[I 2021-05-10 18:56:15,573] Trial 1729 pruned.
[I 2021-05-10 18:56:16,202] Trial 1730 pruned.
[I 2021-05-10 18:56:16,835] Trial 1731 pruned.
[I 2021-05-10 18:56:17,470] Trial 1732 pruned.
[I 2021-05-10 18:56:18,103] Trial 1733 pruned.
[I 2021-05-10 18:56:18,744] Trial 1734 pruned.
[I 2021-05-10 18:57:16,788] Trial 1735 finished with value: 173.24549865722656 and parameters: {'lr': 0.003322865737305605, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 976, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 18:57:17,427] Trial 1736 pruned.
[I 2021-05-10 18:57:18,060] Trial 1737 pruned.
[I 2021-05-10 18:57:21,073] Trial 1738 pruned.
[I 2021-05-10 18:57:21,710] Trial 1739 pruned.
[I 2021-05-10 18:57:22,343] Trial 1740 pruned.
[I 2021-05-10 18:57:22,983] Trial 1741 pruned.
[I 2021-05-10 18:57:23,621] Trial 1742 pruned.
[I 2021-05-10 18:57:24,262] Trial 1743 pruned.
[I 2021-05-10 18:57:24,882] Trial 1744 pruned.
[I 2021-05-10 18:57:25,518] Trial 1745 pruned.
[I 2021-05-10 18:57:26,173] Trial 1746 pruned.
[I 2021-05-10 18:57:26,399] Trial 1747 pruned.
[I 2021-05-10 18:57:27,030] Trial 1748 pruned.
[I 2021-05-10 18:57:27,690] Trial 1749 pruned.
[I 2021-05-10 18:57:28,313] Trial 1750 pruned.
[I 2021-05-10 18:57:28,444] Trial 1751 pruned.
[I 2021-05-10 18:58:27,372] Trial 1752 finished with value: 159.57095336914062 and parameters: {'lr': 0.003281864626984977, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 968, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'relu'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 18:58:28,015] Trial 1753 pruned.
[I 2021-05-10 18:58:28,656] Trial 1754 pruned.
[I 2021-05-10 18:58:29,302] Trial 1755 pruned.
[I 2021-05-10 18:58:29,945] Trial 1756 pruned.
[I 2021-05-10 18:58:30,583] Trial 1757 pruned.
[I 2021-05-10 18:58:31,222] Trial 1758 pruned.
[I 2021-05-10 18:58:31,858] Trial 1759 pruned.
[I 2021-05-10 18:58:31,997] Trial 1760 pruned.
[I 2021-05-10 18:58:32,640] Trial 1761 pruned.
[I 2021-05-10 18:58:33,283] Trial 1762 pruned.
[I 2021-05-10 18:58:33,921] Trial 1763 pruned.
[I 2021-05-10 18:58:34,562] Trial 1764 pruned.
[I 2021-05-10 18:58:34,773] Trial 1765 pruned.
[I 2021-05-10 18:58:35,414] Trial 1766 pruned.
[I 2021-05-10 18:58:36,048] Trial 1767 pruned.
[I 2021-05-10 18:58:36,685] Trial 1768 pruned.
[I 2021-05-10 18:59:36,093] Trial 1769 finished with value: 185.95370483398438 and parameters: {'lr': 0.0031253679527887346, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 982, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'relu', 'HL4_ac_fn': 'relu'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 18:59:36,241] Trial 1770 pruned.
[I 2021-05-10 18:59:36,888] Trial 1771 pruned.
[I 2021-05-10 18:59:37,539] Trial 1772 pruned.
[I 2021-05-10 18:59:38,185] Trial 1773 pruned.
[I 2021-05-10 18:59:38,833] Trial 1774 pruned.
[I 2021-05-10 18:59:39,479] Trial 1775 pruned.
[I 2021-05-10 18:59:40,131] Trial 1776 pruned.
[I 2021-05-10 18:59:40,276] Trial 1777 pruned.
[I 2021-05-10 18:59:40,931] Trial 1778 pruned.
[I 2021-05-10 18:59:41,576] Trial 1779 pruned.
[I 2021-05-10 18:59:42,214] Trial 1780 pruned.
[I 2021-05-10 18:59:42,858] Trial 1781 pruned.
[I 2021-05-10 18:59:43,492] Trial 1782 pruned.
[I 2021-05-10 18:59:43,709] Trial 1783 pruned.
[I 2021-05-10 18:59:44,354] Trial 1784 pruned.
[I 2021-05-10 18:59:44,997] Trial 1785 pruned.
[I 2021-05-10 18:59:45,356] Trial 1786 pruned.
[I 2021-05-10 18:59:45,502] Trial 1787 pruned.
[I 2021-05-10 18:59:46,138] Trial 1788 pruned.
[I 2021-05-10 18:59:46,498] Trial 1789 pruned.
[I 2021-05-10 18:59:47,711] Trial 1790 pruned.
[I 2021-05-10 18:59:48,067] Trial 1791 pruned.
[I 2021-05-10 18:59:48,708] Trial 1792 pruned.
[I 2021-05-10 18:59:49,065] Trial 1793 pruned.
[I 2021-05-10 18:59:57,195] Trial 1794 pruned.
[I 2021-05-10 18:59:57,822] Trial 1795 pruned.
[I 2021-05-10 18:59:58,177] Trial 1796 pruned.
[I 2021-05-10 18:59:58,821] Trial 1797 pruned.
[I 2021-05-10 18:59:59,178] Trial 1798 pruned.
[I 2021-05-10 18:59:59,826] Trial 1799 pruned.
[I 2021-05-10 19:00:00,190] Trial 1800 pruned.
[I 2021-05-10 19:00:00,349] Trial 1801 pruned.
[I 2021-05-10 19:00:01,003] Trial 1802 pruned.
[I 2021-05-10 19:00:01,364] Trial 1803 pruned.
[I 2021-05-10 19:00:59,414] Trial 1804 finished with value: 162.504150390625 and parameters: {'lr': 0.0015285445499027377, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 1024, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 19:01:00,062] Trial 1805 pruned.
[I 2021-05-10 19:01:00,705] Trial 1806 pruned.
[I 2021-05-10 19:01:58,873] Trial 1807 finished with value: 160.08236694335938 and parameters: {'lr': 0.0013861955018362928, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 1022, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 549 with value: 159.330078125.
[I 2021-05-10 19:01:59,507] Trial 1808 pruned.
[I 2021-05-10 19:02:00,156] Trial 1809 pruned.
[I 2021-05-10 19:02:00,798] Trial 1810 pruned.
[I 2021-05-10 19:02:01,448] Trial 1811 pruned.
[I 2021-05-10 19:02:02,094] Trial 1812 pruned.
[I 2021-05-10 19:02:02,742] Trial 1813 pruned.
[I 2021-05-10 19:02:03,384] Trial 1814 pruned.
[I 2021-05-10 19:02:04,031] Trial 1815 pruned.
[I 2021-05-10 19:02:04,670] Trial 1816 pruned.
[I 2021-05-10 19:02:05,310] Trial 1817 pruned.
[I 2021-05-10 19:02:05,954] Trial 1818 pruned.
[I 2021-05-10 19:02:07,753] Trial 1819 pruned.
[I 2021-05-10 19:02:08,416] Trial 1820 pruned.
[I 2021-05-10 19:02:09,071] Trial 1821 pruned.
[I 2021-05-10 19:02:10,857] Trial 1822 pruned.
[I 2021-05-10 19:02:11,509] Trial 1823 pruned.
[I 2021-05-10 19:03:11,291] Trial 1824 finished with value: 159.19020080566406 and parameters: {'lr': 0.001217433386708687, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 1022, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 1824 with value: 159.19020080566406.
[I 2021-05-10 19:03:11,968] Trial 1825 pruned.
[I 2021-05-10 19:03:12,654] Trial 1826 pruned.
[I 2021-05-10 19:03:13,331] Trial 1827 pruned.
[I 2021-05-10 19:03:14,007] Trial 1828 pruned.
[I 2021-05-10 19:03:14,683] Trial 1829 pruned.
[I 2021-05-10 19:03:16,580] Trial 1830 pruned.
[I 2021-05-10 19:03:17,256] Trial 1831 pruned.
[I 2021-05-10 19:03:17,931] Trial 1832 pruned.
[I 2021-05-10 19:03:18,609] Trial 1833 pruned.
[I 2021-05-10 19:03:19,279] Trial 1834 pruned.
[I 2021-05-10 19:03:19,942] Trial 1835 pruned.
[I 2021-05-10 19:03:20,615] Trial 1836 pruned.
[I 2021-05-10 19:03:21,284] Trial 1837 pruned.
[I 2021-05-10 19:03:21,962] Trial 1838 pruned.
[I 2021-05-10 19:03:22,644] Trial 1839 pruned.
[I 2021-05-10 19:03:23,314] Trial 1840 pruned.
[I 2021-05-10 19:03:23,992] Trial 1841 pruned.
[I 2021-05-10 19:04:23,416] Trial 1842 finished with value: 161.08192443847656 and parameters: {'lr': 0.0010835484109226298, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 1004, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 1824 with value: 159.19020080566406.
[I 2021-05-10 19:04:24,629] Trial 1843 pruned.
[I 2021-05-10 19:04:25,269] Trial 1844 pruned.
[I 2021-05-10 19:04:25,913] Trial 1845 pruned.
[I 2021-05-10 19:04:26,586] Trial 1846 pruned.
[I 2021-05-10 19:04:27,224] Trial 1847 pruned.
[I 2021-05-10 19:04:27,860] Trial 1848 pruned.
[I 2021-05-10 19:04:28,508] Trial 1849 pruned.
[I 2021-05-10 19:04:29,147] Trial 1850 pruned.
[I 2021-05-10 19:04:29,802] Trial 1851 pruned.
[I 2021-05-10 19:04:30,457] Trial 1852 pruned.
[I 2021-05-10 19:04:31,105] Trial 1853 pruned.
[I 2021-05-10 19:04:31,746] Trial 1854 pruned.
[I 2021-05-10 19:04:32,383] Trial 1855 pruned.
[I 2021-05-10 19:04:33,023] Trial 1856 pruned.
[I 2021-05-10 19:04:33,665] Trial 1857 pruned.
[I 2021-05-10 19:05:32,245] Trial 1858 finished with value: 164.095458984375 and parameters: {'lr': 0.0013292241430977317, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 960, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 1824 with value: 159.19020080566406.
[I 2021-05-10 19:05:32,917] Trial 1859 pruned.
[I 2021-05-10 19:05:33,614] Trial 1860 pruned.
[I 2021-05-10 19:05:34,309] Trial 1861 pruned.
[I 2021-05-10 19:05:34,987] Trial 1862 pruned.
[I 2021-05-10 19:05:35,677] Trial 1863 pruned.
[I 2021-05-10 19:05:40,136] Trial 1864 pruned.
[I 2021-05-10 19:05:40,832] Trial 1865 pruned.
[I 2021-05-10 19:05:41,527] Trial 1866 pruned.
[I 2021-05-10 19:05:42,217] Trial 1867 pruned.
[I 2021-05-10 19:05:42,909] Trial 1868 pruned.
[I 2021-05-10 19:05:43,602] Trial 1869 pruned.
[I 2021-05-10 19:05:44,298] Trial 1870 pruned.
[I 2021-05-10 19:05:44,991] Trial 1871 pruned.
[I 2021-05-10 19:05:45,684] Trial 1872 pruned.
[I 2021-05-10 19:05:46,376] Trial 1873 pruned.
[I 2021-05-10 19:05:46,605] Trial 1874 pruned.
[I 2021-05-10 19:05:47,294] Trial 1875 pruned.
[I 2021-05-10 19:05:47,445] Trial 1876 pruned.
[I 2021-05-10 19:05:48,134] Trial 1877 pruned.
[I 2021-05-10 19:05:48,827] Trial 1878 pruned.
[I 2021-05-10 19:05:49,521] Trial 1879 pruned.
[I 2021-05-10 19:05:50,216] Trial 1880 pruned.
[I 2021-05-10 19:05:50,907] Trial 1881 pruned.
[I 2021-05-10 19:05:51,597] Trial 1882 pruned.
[I 2021-05-10 19:05:52,287] Trial 1883 pruned.
[I 2021-05-10 19:05:52,979] Trial 1884 pruned.
[I 2021-05-10 19:05:53,132] Trial 1885 pruned.
[I 2021-05-10 19:05:53,830] Trial 1886 pruned.
[I 2021-05-10 19:05:54,520] Trial 1887 pruned.
[I 2021-05-10 19:05:55,202] Trial 1888 pruned.
[I 2021-05-10 19:05:55,884] Trial 1889 pruned.
[I 2021-05-10 19:05:56,574] Trial 1890 pruned.
[I 2021-05-10 19:05:57,268] Trial 1891 pruned.
[I 2021-05-10 19:05:57,961] Trial 1892 pruned.
[I 2021-05-10 19:05:58,193] Trial 1893 pruned.
[I 2021-05-10 19:05:58,888] Trial 1894 pruned.
[I 2021-05-10 19:05:59,042] Trial 1895 pruned.
[I 2021-05-10 19:05:59,735] Trial 1896 pruned.
[I 2021-05-10 19:06:00,429] Trial 1897 pruned.
[I 2021-05-10 19:06:01,116] Trial 1898 pruned.
[I 2021-05-10 19:06:01,833] Trial 1899 pruned.
[I 2021-05-10 19:06:02,528] Trial 1900 pruned.
[I 2021-05-10 19:06:03,225] Trial 1901 pruned.
[I 2021-05-10 19:06:03,907] Trial 1902 pruned.
[I 2021-05-10 19:06:04,595] Trial 1903 pruned.
[I 2021-05-10 19:06:04,750] Trial 1904 pruned.
[I 2021-05-10 19:06:05,442] Trial 1905 pruned.
[I 2021-05-10 19:06:06,144] Trial 1906 pruned.
[I 2021-05-10 19:06:06,839] Trial 1907 pruned.
[I 2021-05-10 19:06:07,535] Trial 1908 pruned.
[I 2021-05-10 19:06:08,231] Trial 1909 pruned.
[I 2021-05-10 19:06:08,457] Trial 1910 pruned.
[I 2021-05-10 19:06:09,146] Trial 1911 pruned.
[I 2021-05-10 19:06:09,836] Trial 1912 pruned.
[I 2021-05-10 19:06:10,531] Trial 1913 pruned.
[I 2021-05-10 19:06:10,675] Trial 1914 pruned.
[I 2021-05-10 19:06:11,364] Trial 1915 pruned.
[I 2021-05-10 19:06:12,059] Trial 1916 pruned.
[I 2021-05-10 19:06:12,724] Trial 1917 pruned.
[I 2021-05-10 19:06:13,421] Trial 1918 pruned.
[I 2021-05-10 19:06:14,119] Trial 1919 pruned.
[I 2021-05-10 19:06:14,804] Trial 1920 pruned.
[I 2021-05-10 19:06:15,486] Trial 1921 pruned.
[I 2021-05-10 19:06:16,182] Trial 1922 pruned.
[I 2021-05-10 19:06:16,334] Trial 1923 pruned.
[I 2021-05-10 19:06:17,032] Trial 1924 pruned.
[I 2021-05-10 19:06:17,724] Trial 1925 pruned.
[I 2021-05-10 19:06:18,421] Trial 1926 pruned.
[I 2021-05-10 19:07:17,480] Trial 1927 finished with value: 158.8334197998047 and parameters: {'lr': 0.0012782353276565707, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 638, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:07:18,147] Trial 1928 pruned.
[I 2021-05-10 19:07:18,823] Trial 1929 pruned.
[I 2021-05-10 19:07:19,492] Trial 1930 pruned.
[I 2021-05-10 19:07:20,163] Trial 1931 pruned.
[I 2021-05-10 19:07:20,831] Trial 1932 pruned.
[I 2021-05-10 19:07:21,501] Trial 1933 pruned.
[I 2021-05-10 19:07:22,172] Trial 1934 pruned.
[I 2021-05-10 19:07:22,843] Trial 1935 pruned.
[I 2021-05-10 19:07:23,512] Trial 1936 pruned.
[I 2021-05-10 19:07:24,179] Trial 1937 pruned.
[I 2021-05-10 19:07:24,842] Trial 1938 pruned.
[I 2021-05-10 19:07:25,505] Trial 1939 pruned.
[I 2021-05-10 19:07:26,174] Trial 1940 pruned.
[I 2021-05-10 19:07:26,838] Trial 1941 pruned.
[I 2021-05-10 19:07:27,505] Trial 1942 pruned.
[I 2021-05-10 19:07:28,175] Trial 1943 pruned.
[I 2021-05-10 19:07:28,851] Trial 1944 pruned.
[I 2021-05-10 19:07:29,519] Trial 1945 pruned.
[I 2021-05-10 19:07:30,188] Trial 1946 pruned.
[I 2021-05-10 19:07:30,850] Trial 1947 pruned.
[I 2021-05-10 19:07:31,524] Trial 1948 pruned.
[I 2021-05-10 19:07:32,193] Trial 1949 pruned.
[I 2021-05-10 19:07:33,463] Trial 1950 pruned.
[I 2021-05-10 19:07:34,129] Trial 1951 pruned.
[I 2021-05-10 19:07:34,803] Trial 1952 pruned.
[I 2021-05-10 19:07:35,472] Trial 1953 pruned.
[I 2021-05-10 19:07:36,141] Trial 1954 pruned.
[I 2021-05-10 19:07:36,814] Trial 1955 pruned.
[I 2021-05-10 19:07:37,489] Trial 1956 pruned.
[I 2021-05-10 19:07:38,156] Trial 1957 pruned.
[I 2021-05-10 19:07:38,819] Trial 1958 pruned.
[I 2021-05-10 19:07:39,488] Trial 1959 pruned.
[I 2021-05-10 19:07:40,178] Trial 1960 pruned.
[I 2021-05-10 19:08:39,746] Trial 1961 finished with value: 163.38658142089844 and parameters: {'lr': 0.0013988689443266728, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 674, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:08:40,389] Trial 1962 pruned.
[I 2021-05-10 19:08:41,030] Trial 1963 pruned.
[I 2021-05-10 19:08:41,669] Trial 1964 pruned.
[I 2021-05-10 19:08:41,879] Trial 1965 pruned.
[I 2021-05-10 19:08:42,532] Trial 1966 pruned.
[I 2021-05-10 19:08:42,675] Trial 1967 pruned.
[I 2021-05-10 19:08:43,356] Trial 1968 pruned.
[I 2021-05-10 19:08:44,025] Trial 1969 pruned.
[I 2021-05-10 19:08:44,697] Trial 1970 pruned.
[I 2021-05-10 19:08:45,372] Trial 1971 pruned.
[I 2021-05-10 19:08:46,051] Trial 1972 pruned.
[I 2021-05-10 19:08:46,722] Trial 1973 pruned.
[I 2021-05-10 19:08:47,395] Trial 1974 pruned.
[I 2021-05-10 19:08:48,054] Trial 1975 pruned.
[I 2021-05-10 19:08:48,203] Trial 1976 pruned.
[I 2021-05-10 19:08:48,873] Trial 1977 pruned.
[I 2021-05-10 19:08:49,544] Trial 1978 pruned.
[I 2021-05-10 19:08:50,218] Trial 1979 pruned.
[I 2021-05-10 19:08:50,889] Trial 1980 pruned.
[I 2021-05-10 19:08:51,562] Trial 1981 pruned.
[I 2021-05-10 19:08:52,239] Trial 1982 pruned.
[I 2021-05-10 19:08:52,492] Trial 1983 pruned.
[I 2021-05-10 19:08:53,188] Trial 1984 pruned.
[I 2021-05-10 19:08:53,864] Trial 1985 pruned.
[I 2021-05-10 19:08:54,535] Trial 1986 pruned.
[I 2021-05-10 19:08:54,687] Trial 1987 pruned.
[I 2021-05-10 19:08:55,380] Trial 1988 pruned.
[I 2021-05-10 19:08:56,053] Trial 1989 pruned.
[I 2021-05-10 19:08:56,748] Trial 1990 pruned.
[I 2021-05-10 19:08:57,446] Trial 1991 pruned.
[I 2021-05-10 19:09:59,188] Trial 1992 finished with value: 161.87440490722656 and parameters: {'lr': 0.0015107520766807618, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 678, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'relu', 'HL4_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:09:59,860] Trial 1993 pruned.
[I 2021-05-10 19:10:00,562] Trial 1994 pruned.
[I 2021-05-10 19:10:01,243] Trial 1995 pruned.
[I 2021-05-10 19:10:01,396] Trial 1996 pruned.
[I 2021-05-10 19:10:02,066] Trial 1997 pruned.
[I 2021-05-10 19:10:02,765] Trial 1998 pruned.
[I 2021-05-10 19:10:03,463] Trial 1999 pruned.
[I 2021-05-10 19:10:04,143] Trial 2000 pruned.
[I 2021-05-10 19:10:04,818] Trial 2001 pruned.
[I 2021-05-10 19:10:05,510] Trial 2002 pruned.
[I 2021-05-10 19:10:05,736] Trial 2003 pruned.
[I 2021-05-10 19:10:06,419] Trial 2004 pruned.
[I 2021-05-10 19:10:07,119] Trial 2005 pruned.
[I 2021-05-10 19:10:07,816] Trial 2006 pruned.
[I 2021-05-10 19:10:07,971] Trial 2007 pruned.
[I 2021-05-10 19:10:08,648] Trial 2008 pruned.
[I 2021-05-10 19:10:09,333] Trial 2009 pruned.
[I 2021-05-10 19:10:10,012] Trial 2010 pruned.
[I 2021-05-10 19:10:10,697] Trial 2011 pruned.
[I 2021-05-10 19:10:11,397] Trial 2012 pruned.
[I 2021-05-10 19:10:12,100] Trial 2013 pruned.
[I 2021-05-10 19:10:12,798] Trial 2014 pruned.
[I 2021-05-10 19:10:13,495] Trial 2015 pruned.
[I 2021-05-10 19:10:13,651] Trial 2016 pruned.
[I 2021-05-10 19:10:14,331] Trial 2017 pruned.
[I 2021-05-10 19:10:15,006] Trial 2018 pruned.
[I 2021-05-10 19:10:15,691] Trial 2019 pruned.
[I 2021-05-10 19:10:16,366] Trial 2020 pruned.
[I 2021-05-10 19:10:17,063] Trial 2021 pruned.
[I 2021-05-10 19:10:17,742] Trial 2022 pruned.
[I 2021-05-10 19:10:18,424] Trial 2023 pruned.
[I 2021-05-10 19:10:18,647] Trial 2024 pruned.
[I 2021-05-10 19:10:19,347] Trial 2025 pruned.
[I 2021-05-10 19:10:20,045] Trial 2026 pruned.
[I 2021-05-10 19:10:20,201] Trial 2027 pruned.
[I 2021-05-10 19:10:20,889] Trial 2028 pruned.
[I 2021-05-10 19:10:21,571] Trial 2029 pruned.
[I 2021-05-10 19:10:22,896] Trial 2030 pruned.
[I 2021-05-10 19:10:23,485] Trial 2031 pruned.
[I 2021-05-10 19:10:24,169] Trial 2032 pruned.
[I 2021-05-10 19:10:24,870] Trial 2033 pruned.
[I 2021-05-10 19:10:25,545] Trial 2034 pruned.
[I 2021-05-10 19:10:26,243] Trial 2035 pruned.
[I 2021-05-10 19:10:26,402] Trial 2036 pruned.
[I 2021-05-10 19:10:27,079] Trial 2037 pruned.
[I 2021-05-10 19:10:27,760] Trial 2038 pruned.
[I 2021-05-10 19:10:28,459] Trial 2039 pruned.
[I 2021-05-10 19:10:29,745] Trial 2040 pruned.
[I 2021-05-10 19:10:30,419] Trial 2041 pruned.
[I 2021-05-10 19:10:31,118] Trial 2042 pruned.
[I 2021-05-10 19:10:33,067] Trial 2043 pruned.
[I 2021-05-10 19:10:33,295] Trial 2044 pruned.
[I 2021-05-10 19:10:34,572] Trial 2045 pruned.
[I 2021-05-10 19:10:35,264] Trial 2046 pruned.
[I 2021-05-10 19:10:35,415] Trial 2047 pruned.
[I 2021-05-10 19:10:37,360] Trial 2048 pruned.
[I 2021-05-10 19:10:38,043] Trial 2049 pruned.
[I 2021-05-10 19:10:38,736] Trial 2050 pruned.
[I 2021-05-10 19:10:39,425] Trial 2051 pruned.
[I 2021-05-10 19:10:40,102] Trial 2052 pruned.
[I 2021-05-10 19:10:40,807] Trial 2053 pruned.
[I 2021-05-10 19:10:41,479] Trial 2054 pruned.
[I 2021-05-10 19:10:42,169] Trial 2055 pruned.
[I 2021-05-10 19:10:42,331] Trial 2056 pruned.
[I 2021-05-10 19:10:43,033] Trial 2057 pruned.
[I 2021-05-10 19:10:43,707] Trial 2058 pruned.
[I 2021-05-10 19:10:44,382] Trial 2059 pruned.
[I 2021-05-10 19:10:44,957] Trial 2060 pruned.
[I 2021-05-10 19:10:45,631] Trial 2061 pruned.
[I 2021-05-10 19:10:46,310] Trial 2062 pruned.
[I 2021-05-10 19:10:47,012] Trial 2063 pruned.
[I 2021-05-10 19:10:47,239] Trial 2064 pruned.
[I 2021-05-10 19:10:47,679] Trial 2065 pruned.
[I 2021-05-10 19:10:49,563] Trial 2066 pruned.
[I 2021-05-10 19:10:49,724] Trial 2067 pruned.
[I 2021-05-10 19:10:50,396] Trial 2068 pruned.
[I 2021-05-10 19:10:51,097] Trial 2069 pruned.
[I 2021-05-10 19:10:51,790] Trial 2070 pruned.
[I 2021-05-10 19:10:52,487] Trial 2071 pruned.
[I 2021-05-10 19:10:53,174] Trial 2072 pruned.
[I 2021-05-10 19:10:53,858] Trial 2073 pruned.
[I 2021-05-10 19:10:54,557] Trial 2074 pruned.
[I 2021-05-10 19:10:55,257] Trial 2075 pruned.
[I 2021-05-10 19:10:56,581] Trial 2076 pruned.
[I 2021-05-10 19:10:57,265] Trial 2077 pruned.
[I 2021-05-10 19:10:57,419] Trial 2078 pruned.
[I 2021-05-10 19:10:58,103] Trial 2079 pruned.
[I 2021-05-10 19:10:58,812] Trial 2080 pruned.
[I 2021-05-10 19:10:59,495] Trial 2081 pruned.
[I 2021-05-10 19:11:00,181] Trial 2082 pruned.
[I 2021-05-10 19:11:00,896] Trial 2083 pruned.
[I 2021-05-10 19:11:01,126] Trial 2084 pruned.
[I 2021-05-10 19:11:01,843] Trial 2085 pruned.
[I 2021-05-10 19:11:02,521] Trial 2086 pruned.
[I 2021-05-10 19:11:02,682] Trial 2087 pruned.
[I 2021-05-10 19:11:05,898] Trial 2088 pruned.
[I 2021-05-10 19:11:06,577] Trial 2089 pruned.
[I 2021-05-10 19:11:06,950] Trial 2090 pruned.
[I 2021-05-10 19:11:07,635] Trial 2091 pruned.
[I 2021-05-10 19:11:08,021] Trial 2092 pruned.
[I 2021-05-10 19:11:08,701] Trial 2093 pruned.
[I 2021-05-10 19:11:09,387] Trial 2094 pruned.
[I 2021-05-10 19:11:09,776] Trial 2095 pruned.
[I 2021-05-10 19:11:10,479] Trial 2096 pruned.
[I 2021-05-10 19:11:11,155] Trial 2097 pruned.
[I 2021-05-10 19:11:11,531] Trial 2098 pruned.
[I 2021-05-10 19:11:12,233] Trial 2099 pruned.
[I 2021-05-10 19:11:12,923] Trial 2100 pruned.
[I 2021-05-10 19:11:13,076] Trial 2101 pruned.
[I 2021-05-10 19:11:13,467] Trial 2102 pruned.
[I 2021-05-10 19:11:14,448] Trial 2103 pruned.
[I 2021-05-10 19:11:15,150] Trial 2104 pruned.
[I 2021-05-10 19:11:15,524] Trial 2105 pruned.
[I 2021-05-10 19:11:16,202] Trial 2106 pruned.
[I 2021-05-10 19:11:16,903] Trial 2107 pruned.
[I 2021-05-10 19:11:17,291] Trial 2108 pruned.
[I 2021-05-10 19:11:17,974] Trial 2109 pruned.
[I 2021-05-10 19:11:18,663] Trial 2110 pruned.
[I 2021-05-10 19:11:19,035] Trial 2111 pruned.
[I 2021-05-10 19:11:19,264] Trial 2112 pruned.
[I 2021-05-10 19:11:19,950] Trial 2113 pruned.
[I 2021-05-10 19:12:21,613] Trial 2114 finished with value: 162.45213317871094 and parameters: {'lr': 0.002378878282656787, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 986, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:12:21,982] Trial 2115 pruned.
[I 2021-05-10 19:12:24,994] Trial 2116 pruned.
[I 2021-05-10 19:12:25,147] Trial 2117 pruned.
[I 2021-05-10 19:12:25,805] Trial 2118 pruned.
[I 2021-05-10 19:12:26,461] Trial 2119 pruned.
[I 2021-05-10 19:12:27,106] Trial 2120 pruned.
[I 2021-05-10 19:12:27,753] Trial 2121 pruned.
[I 2021-05-10 19:12:28,119] Trial 2122 pruned.
[I 2021-05-10 19:12:28,759] Trial 2123 pruned.
[I 2021-05-10 19:12:29,396] Trial 2124 pruned.
[I 2021-05-10 19:12:29,751] Trial 2125 pruned.
[I 2021-05-10 19:12:30,403] Trial 2126 pruned.
[I 2021-05-10 19:12:31,043] Trial 2127 pruned.
[I 2021-05-10 19:12:31,399] Trial 2128 pruned.
[I 2021-05-10 19:12:32,646] Trial 2129 pruned.
[I 2021-05-10 19:12:33,285] Trial 2130 pruned.
[I 2021-05-10 19:12:33,648] Trial 2131 pruned.
[I 2021-05-10 19:12:33,793] Trial 2132 pruned.
[I 2021-05-10 19:12:34,444] Trial 2133 pruned.
[I 2021-05-10 19:12:34,722] Trial 2134 pruned.
[I 2021-05-10 19:12:35,369] Trial 2135 pruned.
[I 2021-05-10 19:12:36,023] Trial 2136 pruned.
[I 2021-05-10 19:12:36,378] Trial 2137 pruned.
[I 2021-05-10 19:12:37,013] Trial 2138 pruned.
[I 2021-05-10 19:12:37,664] Trial 2139 pruned.
[I 2021-05-10 19:13:07,975] Trial 2140 finished with value: 160.2460479736328 and parameters: {'lr': 0.0033571898873299933, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 1006, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:13:08,365] Trial 2141 pruned.
[I 2021-05-10 19:13:08,751] Trial 2142 pruned.
[I 2021-05-10 19:13:09,141] Trial 2143 pruned.
[I 2021-05-10 19:13:09,532] Trial 2144 pruned.
[I 2021-05-10 19:13:09,924] Trial 2145 pruned.
[I 2021-05-10 19:13:10,310] Trial 2146 pruned.
[I 2021-05-10 19:13:10,694] Trial 2147 pruned.
[I 2021-05-10 19:13:11,079] Trial 2148 pruned.
[I 2021-05-10 19:13:11,466] Trial 2149 pruned.
[I 2021-05-10 19:13:11,851] Trial 2150 pruned.
[I 2021-05-10 19:13:12,234] Trial 2151 pruned.
[I 2021-05-10 19:13:12,621] Trial 2152 pruned.
[I 2021-05-10 19:13:13,009] Trial 2153 pruned.
[I 2021-05-10 19:13:13,397] Trial 2154 pruned.
[I 2021-05-10 19:13:13,781] Trial 2155 pruned.
[I 2021-05-10 19:13:14,170] Trial 2156 pruned.
[I 2021-05-10 19:13:14,558] Trial 2157 pruned.
[I 2021-05-10 19:13:14,950] Trial 2158 pruned.
[I 2021-05-10 19:13:15,337] Trial 2159 pruned.
[I 2021-05-10 19:13:15,720] Trial 2160 pruned.
[I 2021-05-10 19:13:16,108] Trial 2161 pruned.
[I 2021-05-10 19:13:16,498] Trial 2162 pruned.
[I 2021-05-10 19:13:16,881] Trial 2163 pruned.
[I 2021-05-10 19:13:17,270] Trial 2164 pruned.
[I 2021-05-10 19:13:17,653] Trial 2165 pruned.
[I 2021-05-10 19:13:18,040] Trial 2166 pruned.
[I 2021-05-10 19:13:18,427] Trial 2167 pruned.
[I 2021-05-10 19:13:18,814] Trial 2168 pruned.
[I 2021-05-10 19:13:19,188] Trial 2169 pruned.
[I 2021-05-10 19:13:19,574] Trial 2170 pruned.
[I 2021-05-10 19:13:19,963] Trial 2171 pruned.
[I 2021-05-10 19:13:20,349] Trial 2172 pruned.
[I 2021-05-10 19:13:20,736] Trial 2173 pruned.
[I 2021-05-10 19:13:21,127] Trial 2174 pruned.
[I 2021-05-10 19:13:21,516] Trial 2175 pruned.
[I 2021-05-10 19:13:21,899] Trial 2176 pruned.
[I 2021-05-10 19:13:22,283] Trial 2177 pruned.
[I 2021-05-10 19:13:22,520] Trial 2178 pruned.
[I 2021-05-10 19:13:22,914] Trial 2179 pruned.
[I 2021-05-10 19:13:23,302] Trial 2180 pruned.
[I 2021-05-10 19:13:23,458] Trial 2181 pruned.
[I 2021-05-10 19:13:53,840] Trial 2182 finished with value: 160.65113830566406 and parameters: {'lr': 0.0023266644742492544, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 1008, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:13:54,207] Trial 2183 pruned.
[I 2021-05-10 19:13:54,571] Trial 2184 pruned.
[I 2021-05-10 19:13:54,934] Trial 2185 pruned.
[I 2021-05-10 19:13:55,593] Trial 2186 pruned.
[I 2021-05-10 19:13:55,955] Trial 2187 pruned.
[I 2021-05-10 19:13:56,319] Trial 2188 pruned.
[I 2021-05-10 19:13:56,681] Trial 2189 pruned.
[I 2021-05-10 19:13:57,045] Trial 2190 pruned.
[I 2021-05-10 19:13:57,408] Trial 2191 pruned.
[I 2021-05-10 19:13:57,768] Trial 2192 pruned.
[I 2021-05-10 19:13:58,135] Trial 2193 pruned.
[I 2021-05-10 19:13:58,514] Trial 2194 pruned.
[I 2021-05-10 19:13:58,877] Trial 2195 pruned.
[I 2021-05-10 19:13:59,242] Trial 2196 pruned.
[I 2021-05-10 19:13:59,605] Trial 2197 pruned.
[I 2021-05-10 19:13:59,971] Trial 2198 pruned.
[I 2021-05-10 19:14:00,338] Trial 2199 pruned.
[I 2021-05-10 19:14:00,703] Trial 2200 pruned.
[I 2021-05-10 19:14:01,065] Trial 2201 pruned.
[I 2021-05-10 19:14:01,429] Trial 2202 pruned.
[I 2021-05-10 19:14:01,793] Trial 2203 pruned.
[I 2021-05-10 19:14:02,159] Trial 2204 pruned.
[I 2021-05-10 19:14:02,522] Trial 2205 pruned.
[I 2021-05-10 19:14:02,887] Trial 2206 pruned.
[I 2021-05-10 19:14:03,252] Trial 2207 pruned.
[I 2021-05-10 19:14:03,552] Trial 2208 pruned.
[I 2021-05-10 19:14:03,916] Trial 2209 pruned.
[I 2021-05-10 19:14:04,282] Trial 2210 pruned.
[I 2021-05-10 19:14:04,651] Trial 2211 pruned.
[I 2021-05-10 19:14:05,017] Trial 2212 pruned.
[I 2021-05-10 19:14:05,382] Trial 2213 pruned.
[I 2021-05-10 19:14:05,746] Trial 2214 pruned.
[I 2021-05-10 19:14:06,112] Trial 2215 pruned.
[I 2021-05-10 19:14:06,480] Trial 2216 pruned.
[I 2021-05-10 19:14:06,845] Trial 2217 pruned.
[I 2021-05-10 19:14:07,212] Trial 2218 pruned.
[I 2021-05-10 19:14:07,578] Trial 2219 pruned.
[I 2021-05-10 19:14:07,942] Trial 2220 pruned.
[I 2021-05-10 19:14:08,309] Trial 2221 pruned.
[I 2021-05-10 19:14:08,669] Trial 2222 pruned.
[I 2021-05-10 19:14:09,036] Trial 2223 pruned.
[I 2021-05-10 19:14:09,189] Trial 2224 pruned.
[I 2021-05-10 19:14:09,555] Trial 2225 pruned.
[I 2021-05-10 19:14:09,920] Trial 2226 pruned.
[I 2021-05-10 19:14:10,281] Trial 2227 pruned.
[I 2021-05-10 19:14:10,647] Trial 2228 pruned.
[I 2021-05-10 19:14:10,871] Trial 2229 pruned.
[I 2021-05-10 19:14:11,239] Trial 2230 pruned.
[I 2021-05-10 19:14:11,389] Trial 2231 pruned.
[I 2021-05-10 19:14:11,751] Trial 2232 pruned.
[I 2021-05-10 19:14:12,116] Trial 2233 pruned.
[I 2021-05-10 19:14:12,483] Trial 2234 pruned.
[I 2021-05-10 19:14:12,846] Trial 2235 pruned.
[I 2021-05-10 19:14:13,214] Trial 2236 pruned.
[I 2021-05-10 19:14:13,578] Trial 2237 pruned.
[I 2021-05-10 19:14:13,729] Trial 2238 pruned.
[I 2021-05-10 19:14:14,089] Trial 2239 pruned.
[I 2021-05-10 19:14:14,453] Trial 2240 pruned.
[I 2021-05-10 19:14:14,817] Trial 2241 pruned.
[I 2021-05-10 19:14:15,481] Trial 2242 pruned.
[I 2021-05-10 19:14:15,706] Trial 2243 pruned.
[I 2021-05-10 19:14:15,974] Trial 2244 pruned.
[I 2021-05-10 19:14:16,129] Trial 2245 pruned.
[I 2021-05-10 19:14:16,791] Trial 2246 pruned.
[I 2021-05-10 19:14:17,155] Trial 2247 pruned.
[I 2021-05-10 19:14:17,518] Trial 2248 pruned.
[I 2021-05-10 19:14:17,891] Trial 2249 pruned.
[I 2021-05-10 19:14:18,201] Trial 2250 pruned.
[I 2021-05-10 19:14:18,574] Trial 2251 pruned.
[I 2021-05-10 19:14:18,720] Trial 2252 pruned.
[I 2021-05-10 19:14:19,078] Trial 2253 pruned.
[I 2021-05-10 19:14:19,440] Trial 2254 pruned.
[I 2021-05-10 19:14:19,805] Trial 2255 pruned.
[I 2021-05-10 19:14:20,177] Trial 2256 pruned.
[I 2021-05-10 19:14:20,544] Trial 2257 pruned.
[I 2021-05-10 19:14:20,694] Trial 2258 pruned.
[I 2021-05-10 19:14:20,919] Trial 2259 pruned.
[I 2021-05-10 19:14:21,283] Trial 2260 pruned.
[I 2021-05-10 19:14:21,651] Trial 2261 pruned.
[I 2021-05-10 19:14:22,018] Trial 2262 pruned.
[I 2021-05-10 19:14:22,384] Trial 2263 pruned.
[I 2021-05-10 19:14:22,753] Trial 2264 pruned.
[I 2021-05-10 19:14:23,116] Trial 2265 pruned.
[I 2021-05-10 19:14:23,270] Trial 2266 pruned.
[I 2021-05-10 19:14:23,637] Trial 2267 pruned.
[I 2021-05-10 19:14:23,996] Trial 2268 pruned.
[I 2021-05-10 19:14:24,368] Trial 2269 pruned.
[I 2021-05-10 19:14:24,732] Trial 2270 pruned.
[I 2021-05-10 19:14:25,097] Trial 2271 pruned.
[I 2021-05-10 19:14:25,321] Trial 2272 pruned.
[I 2021-05-10 19:14:25,475] Trial 2273 pruned.
[I 2021-05-10 19:14:25,829] Trial 2274 pruned.
[I 2021-05-10 19:14:26,193] Trial 2275 pruned.
[I 2021-05-10 19:14:26,561] Trial 2276 pruned.
[I 2021-05-10 19:14:26,922] Trial 2277 pruned.
[I 2021-05-10 19:14:27,286] Trial 2278 pruned.
[I 2021-05-10 19:14:27,651] Trial 2279 pruned.
[I 2021-05-10 19:14:27,805] Trial 2280 pruned.
[I 2021-05-10 19:14:28,459] Trial 2281 pruned.
[I 2021-05-10 19:14:28,822] Trial 2282 pruned.
[I 2021-05-10 19:14:29,488] Trial 2283 pruned.
[I 2021-05-10 19:14:29,856] Trial 2284 pruned.
[I 2021-05-10 19:14:30,358] Trial 2285 pruned.
[I 2021-05-10 19:14:31,016] Trial 2286 pruned.
[I 2021-05-10 19:14:31,386] Trial 2287 pruned.
[I 2021-05-10 19:14:32,044] Trial 2288 pruned.
[I 2021-05-10 19:14:32,407] Trial 2289 pruned.
[I 2021-05-10 19:14:33,060] Trial 2290 pruned.
[I 2021-05-10 19:14:33,283] Trial 2291 pruned.
[I 2021-05-10 19:14:33,929] Trial 2292 pruned.
[I 2021-05-10 19:14:34,294] Trial 2293 pruned.
[I 2021-05-10 19:14:34,959] Trial 2294 pruned.
[I 2021-05-10 19:14:35,120] Trial 2295 pruned.
[I 2021-05-10 19:14:35,501] Trial 2296 pruned.
[I 2021-05-10 19:14:36,178] Trial 2297 pruned.
[I 2021-05-10 19:14:36,565] Trial 2298 pruned.
[I 2021-05-10 19:14:37,266] Trial 2299 pruned.
[I 2021-05-10 19:14:39,219] Trial 2300 pruned.
[I 2021-05-10 19:14:39,602] Trial 2301 pruned.
[I 2021-05-10 19:14:40,298] Trial 2302 pruned.
[I 2021-05-10 19:14:40,687] Trial 2303 pruned.
[I 2021-05-10 19:14:41,384] Trial 2304 pruned.
[I 2021-05-10 19:14:41,779] Trial 2305 pruned.
[I 2021-05-10 19:14:42,467] Trial 2306 pruned.
[I 2021-05-10 19:14:43,175] Trial 2307 pruned.
[I 2021-05-10 19:14:43,573] Trial 2308 pruned.
[I 2021-05-10 19:14:43,731] Trial 2309 pruned.
[I 2021-05-10 19:14:44,421] Trial 2310 pruned.
[I 2021-05-10 19:14:44,814] Trial 2311 pruned.
[I 2021-05-10 19:14:45,496] Trial 2312 pruned.
[I 2021-05-10 19:14:45,891] Trial 2313 pruned.
[I 2021-05-10 19:14:47,845] Trial 2314 pruned.
[I 2021-05-10 19:14:48,537] Trial 2315 pruned.
[I 2021-05-10 19:14:48,930] Trial 2316 pruned.
[I 2021-05-10 19:14:49,618] Trial 2317 pruned.
[I 2021-05-10 19:14:49,986] Trial 2318 pruned.
[I 2021-05-10 19:14:50,641] Trial 2319 pruned.
[I 2021-05-10 19:14:50,864] Trial 2320 pruned.
[I 2021-05-10 19:14:51,527] Trial 2321 pruned.
[I 2021-05-10 19:14:51,898] Trial 2322 pruned.
[I 2021-05-10 19:14:52,052] Trial 2323 pruned.
[I 2021-05-10 19:14:52,737] Trial 2324 pruned.
[I 2021-05-10 19:14:53,116] Trial 2325 pruned.
[I 2021-05-10 19:14:53,779] Trial 2326 pruned.
[I 2021-05-10 19:14:54,065] Trial 2327 pruned.
[I 2021-05-10 19:14:54,725] Trial 2328 pruned.
[I 2021-05-10 19:14:55,378] Trial 2329 pruned.
[I 2021-05-10 19:14:55,753] Trial 2330 pruned.
[I 2021-05-10 19:14:56,413] Trial 2331 pruned.
[I 2021-05-10 19:14:56,782] Trial 2332 pruned.
[I 2021-05-10 19:14:57,446] Trial 2333 pruned.
[I 2021-05-10 19:14:57,814] Trial 2334 pruned.
[I 2021-05-10 19:14:58,480] Trial 2335 pruned.
[I 2021-05-10 19:14:59,134] Trial 2336 pruned.
[I 2021-05-10 19:14:59,513] Trial 2337 pruned.
[I 2021-05-10 19:14:59,667] Trial 2338 pruned.
[I 2021-05-10 19:15:00,337] Trial 2339 pruned.
[I 2021-05-10 19:15:00,724] Trial 2340 pruned.
[I 2021-05-10 19:15:01,397] Trial 2341 pruned.
[I 2021-05-10 19:15:01,762] Trial 2342 pruned.
[I 2021-05-10 19:15:02,423] Trial 2343 pruned.
[I 2021-05-10 19:15:03,075] Trial 2344 pruned.
[I 2021-05-10 19:15:03,441] Trial 2345 pruned.
[I 2021-05-10 19:15:04,114] Trial 2346 pruned.
[I 2021-05-10 19:15:04,516] Trial 2347 pruned.
[I 2021-05-10 19:15:05,009] Trial 2348 pruned.
[I 2021-05-10 19:15:05,249] Trial 2349 pruned.
[I 2021-05-10 19:15:05,936] Trial 2350 pruned.
[I 2021-05-10 19:15:06,327] Trial 2351 pruned.
[I 2021-05-10 19:15:06,491] Trial 2352 pruned.
[I 2021-05-10 19:15:07,770] Trial 2353 pruned.
[I 2021-05-10 19:15:08,164] Trial 2354 pruned.
[I 2021-05-10 19:15:08,872] Trial 2355 pruned.
[I 2021-05-10 19:15:09,248] Trial 2356 pruned.
[I 2021-05-10 19:15:09,949] Trial 2357 pruned.
[I 2021-05-10 19:15:10,630] Trial 2358 pruned.
[I 2021-05-10 19:15:11,014] Trial 2359 pruned.
[I 2021-05-10 19:15:11,697] Trial 2360 pruned.
[I 2021-05-10 19:15:12,075] Trial 2361 pruned.
[I 2021-05-10 19:15:12,658] Trial 2362 pruned.
[I 2021-05-10 19:15:13,041] Trial 2363 pruned.
[I 2021-05-10 19:15:13,742] Trial 2364 pruned.
[I 2021-05-10 19:15:15,036] Trial 2365 pruned.
[I 2021-05-10 19:15:15,437] Trial 2366 pruned.
[I 2021-05-10 19:15:15,596] Trial 2367 pruned.
[I 2021-05-10 19:15:16,304] Trial 2368 pruned.
[I 2021-05-10 19:15:16,700] Trial 2369 pruned.
[I 2021-05-10 19:15:17,388] Trial 2370 pruned.
[I 2021-05-10 19:15:19,669] Trial 2371 pruned.
[I 2021-05-10 19:15:20,382] Trial 2372 pruned.
[I 2021-05-10 19:15:21,074] Trial 2373 pruned.
[I 2021-05-10 19:15:21,467] Trial 2374 pruned.
[I 2021-05-10 19:15:22,155] Trial 2375 pruned.
[I 2021-05-10 19:15:22,545] Trial 2376 pruned.
[I 2021-05-10 19:15:23,793] Trial 2377 pruned.
[I 2021-05-10 19:15:24,457] Trial 2378 pruned.
[I 2021-05-10 19:15:24,824] Trial 2379 pruned.
[I 2021-05-10 19:15:25,051] Trial 2380 pruned.
[I 2021-05-10 19:15:25,713] Trial 2381 pruned.
[I 2021-05-10 19:15:25,869] Trial 2382 pruned.
[I 2021-05-10 19:15:26,235] Trial 2383 pruned.
[I 2021-05-10 19:15:26,899] Trial 2384 pruned.
[I 2021-05-10 19:15:27,272] Trial 2385 pruned.
[I 2021-05-10 19:15:27,941] Trial 2386 pruned.
[I 2021-05-10 19:15:28,599] Trial 2387 pruned.
[I 2021-05-10 19:15:28,975] Trial 2388 pruned.
[I 2021-05-10 19:15:29,641] Trial 2389 pruned.
[I 2021-05-10 19:15:30,008] Trial 2390 pruned.
[I 2021-05-10 19:15:30,675] Trial 2391 pruned.
[I 2021-05-10 19:15:31,043] Trial 2392 pruned.
[I 2021-05-10 19:15:31,700] Trial 2393 pruned.
[I 2021-05-10 19:15:32,363] Trial 2394 pruned.
[I 2021-05-10 19:15:32,731] Trial 2395 pruned.
[I 2021-05-10 19:15:32,885] Trial 2396 pruned.
[I 2021-05-10 19:15:37,816] Trial 2397 pruned.
[I 2021-05-10 19:15:38,183] Trial 2398 pruned.
[I 2021-05-10 19:15:38,849] Trial 2399 pruned.
[I 2021-05-10 19:15:39,219] Trial 2400 pruned.
[I 2021-05-10 19:15:39,873] Trial 2401 pruned.
[I 2021-05-10 19:15:40,525] Trial 2402 pruned.
[I 2021-05-10 19:15:40,895] Trial 2403 pruned.
[I 2021-05-10 19:16:41,517] Trial 2404 finished with value: 160.7152557373047 and parameters: {'lr': 0.002374939292122102, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 1008, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:16:42,206] Trial 2405 pruned.
[I 2021-05-10 19:16:42,593] Trial 2406 pruned.
[I 2021-05-10 19:16:43,291] Trial 2407 pruned.
[I 2021-05-10 19:16:43,681] Trial 2408 pruned.
[I 2021-05-10 19:16:44,371] Trial 2409 pruned.
[I 2021-05-10 19:16:44,611] Trial 2410 pruned.
[I 2021-05-10 19:16:44,779] Trial 2411 pruned.
[I 2021-05-10 19:16:45,468] Trial 2412 pruned.
[I 2021-05-10 19:16:45,854] Trial 2413 pruned.
[I 2021-05-10 19:17:48,258] Trial 2414 finished with value: 162.91725158691406 and parameters: {'lr': 0.0024299462379684016, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 952, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:17:48,654] Trial 2415 pruned.
[I 2021-05-10 19:17:49,360] Trial 2416 pruned.
[I 2021-05-10 19:17:49,757] Trial 2417 pruned.
[I 2021-05-10 19:17:50,465] Trial 2418 pruned.
[I 2021-05-10 19:17:51,171] Trial 2419 pruned.
[I 2021-05-10 19:17:51,566] Trial 2420 pruned.
[I 2021-05-10 19:18:55,025] Trial 2421 finished with value: 163.18556213378906 and parameters: {'lr': 0.0021700098833548126, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 1024, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:18:55,416] Trial 2422 pruned.
[I 2021-05-10 19:18:56,098] Trial 2423 pruned.
[I 2021-05-10 19:18:59,231] Trial 2424 pruned.
[I 2021-05-10 19:18:59,617] Trial 2425 pruned.
[I 2021-05-10 19:18:59,787] Trial 2426 pruned.
[I 2021-05-10 19:19:00,520] Trial 2427 pruned.
[I 2021-05-10 19:19:00,934] Trial 2428 pruned.
[I 2021-05-10 19:19:02,899] Trial 2429 pruned.
[I 2021-05-10 19:19:34,049] Trial 2430 finished with value: 170.80206298828125 and parameters: {'lr': 0.0025964879723160894, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 1006, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:19:38,599] Trial 2431 pruned.
[I 2021-05-10 19:19:39,298] Trial 2432 pruned.
[I 2021-05-10 19:19:39,695] Trial 2433 pruned.
[I 2021-05-10 19:19:40,391] Trial 2434 pruned.
[I 2021-05-10 19:19:40,784] Trial 2435 pruned.
[I 2021-05-10 19:19:41,511] Trial 2436 pruned.
[I 2021-05-10 19:19:41,896] Trial 2437 pruned.
[I 2021-05-10 19:19:42,571] Trial 2438 pruned.
[I 2021-05-10 19:19:42,738] Trial 2439 pruned.
[I 2021-05-10 19:19:43,419] Trial 2440 pruned.
[I 2021-05-10 19:19:43,797] Trial 2441 pruned.
[I 2021-05-10 19:19:44,033] Trial 2442 pruned.
[I 2021-05-10 19:19:44,718] Trial 2443 pruned.
[I 2021-05-10 19:19:45,110] Trial 2444 pruned.
[I 2021-05-10 19:19:45,808] Trial 2445 pruned.
[I 2021-05-10 19:19:46,206] Trial 2446 pruned.
[I 2021-05-10 19:19:47,525] Trial 2447 pruned.
[I 2021-05-10 19:19:48,243] Trial 2448 pruned.
[I 2021-05-10 19:19:48,643] Trial 2449 pruned.
[I 2021-05-10 19:19:49,334] Trial 2450 pruned.
[I 2021-05-10 19:19:49,721] Trial 2451 pruned.
[I 2021-05-10 19:19:50,411] Trial 2452 pruned.
[I 2021-05-10 19:19:51,088] Trial 2453 pruned.
[I 2021-05-10 19:19:51,468] Trial 2454 pruned.
[I 2021-05-10 19:19:51,635] Trial 2455 pruned.
[I 2021-05-10 19:19:52,311] Trial 2456 pruned.
[I 2021-05-10 19:19:52,698] Trial 2457 pruned.
[I 2021-05-10 19:19:53,392] Trial 2458 pruned.
[I 2021-05-10 19:19:53,778] Trial 2459 pruned.
[I 2021-05-10 19:19:54,452] Trial 2460 pruned.
[I 2021-05-10 19:19:55,743] Trial 2461 pruned.
[I 2021-05-10 19:19:56,123] Trial 2462 pruned.
[I 2021-05-10 19:19:56,794] Trial 2463 pruned.
[I 2021-05-10 19:19:57,171] Trial 2464 pruned.
[I 2021-05-10 19:19:57,849] Trial 2465 pruned.
[I 2021-05-10 19:19:58,085] Trial 2466 pruned.
[I 2021-05-10 19:19:58,466] Trial 2467 pruned.
[I 2021-05-10 19:19:59,143] Trial 2468 pruned.
[I 2021-05-10 19:19:59,310] Trial 2469 pruned.
[I 2021-05-10 19:19:59,976] Trial 2470 pruned.
[I 2021-05-10 19:20:00,360] Trial 2471 pruned.
[I 2021-05-10 19:20:01,038] Trial 2472 pruned.
[I 2021-05-10 19:20:01,419] Trial 2473 pruned.
[I 2021-05-10 19:20:01,937] Trial 2474 pruned.
[I 2021-05-10 19:20:02,319] Trial 2475 pruned.
[I 2021-05-10 19:20:02,994] Trial 2476 pruned.
[I 2021-05-10 19:20:03,670] Trial 2477 pruned.
[I 2021-05-10 19:20:04,054] Trial 2478 pruned.
[I 2021-05-10 19:20:04,731] Trial 2479 pruned.
[I 2021-05-10 19:20:05,111] Trial 2480 pruned.
[I 2021-05-10 19:20:05,796] Trial 2481 pruned.
[I 2021-05-10 19:20:06,478] Trial 2482 pruned.
[I 2021-05-10 19:20:06,857] Trial 2483 pruned.
[I 2021-05-10 19:20:07,022] Trial 2484 pruned.
[I 2021-05-10 19:20:07,701] Trial 2485 pruned.
[I 2021-05-10 19:20:08,098] Trial 2486 pruned.
[I 2021-05-10 19:20:08,832] Trial 2487 pruned.
[I 2021-05-10 19:20:09,242] Trial 2488 pruned.
[I 2021-05-10 19:20:10,004] Trial 2489 pruned.
[I 2021-05-10 19:20:10,701] Trial 2490 pruned.
[I 2021-05-10 19:20:11,085] Trial 2491 pruned.
[I 2021-05-10 19:20:11,802] Trial 2492 pruned.
[I 2021-05-10 19:20:12,197] Trial 2493 pruned.
[I 2021-05-10 19:20:12,903] Trial 2494 pruned.
[I 2021-05-10 19:20:13,305] Trial 2495 pruned.
[I 2021-05-10 19:20:13,545] Trial 2496 pruned.
[I 2021-05-10 19:20:14,254] Trial 2497 pruned.
[I 2021-05-10 19:20:14,425] Trial 2498 pruned.
[I 2021-05-10 19:21:16,188] Trial 2499 finished with value: 167.7655487060547 and parameters: {'lr': 0.001182635389005816, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 1004, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:21:16,563] Trial 2500 pruned.
[I 2021-05-10 19:21:17,257] Trial 2501 pruned.
[I 2021-05-10 19:21:17,631] Trial 2502 pruned.
[I 2021-05-10 19:21:18,300] Trial 2503 pruned.
[I 2021-05-10 19:21:18,676] Trial 2504 pruned.
[I 2021-05-10 19:21:19,335] Trial 2505 pruned.
[I 2021-05-10 19:21:19,999] Trial 2506 pruned.
[I 2021-05-10 19:21:20,377] Trial 2507 pruned.
[I 2021-05-10 19:21:21,056] Trial 2508 pruned.
[I 2021-05-10 19:21:21,428] Trial 2509 pruned.
[I 2021-05-10 19:21:22,091] Trial 2510 pruned.
[I 2021-05-10 19:21:22,749] Trial 2511 pruned.
[I 2021-05-10 19:21:23,119] Trial 2512 pruned.
[I 2021-05-10 19:21:23,277] Trial 2513 pruned.
[I 2021-05-10 19:21:23,833] Trial 2514 pruned.
[I 2021-05-10 19:21:24,209] Trial 2515 pruned.
[I 2021-05-10 19:21:24,870] Trial 2516 pruned.
[I 2021-05-10 19:21:25,257] Trial 2517 pruned.
[I 2021-05-10 19:21:25,921] Trial 2518 pruned.
[I 2021-05-10 19:21:26,582] Trial 2519 pruned.
[I 2021-05-10 19:21:26,855] Trial 2520 pruned.
[I 2021-05-10 19:21:27,524] Trial 2521 pruned.
[I 2021-05-10 19:21:27,896] Trial 2522 pruned.
[I 2021-05-10 19:21:28,562] Trial 2523 pruned.
[I 2021-05-10 19:21:28,789] Trial 2524 pruned.
[I 2021-05-10 19:21:29,160] Trial 2525 pruned.
[I 2021-05-10 19:21:29,830] Trial 2526 pruned.
[I 2021-05-10 19:21:29,990] Trial 2527 pruned.
[I 2021-05-10 19:21:30,639] Trial 2528 pruned.
[I 2021-05-10 19:21:31,001] Trial 2529 pruned.
[I 2021-05-10 19:21:31,664] Trial 2530 pruned.
[I 2021-05-10 19:21:32,030] Trial 2531 pruned.
[I 2021-05-10 19:21:32,690] Trial 2532 pruned.
[I 2021-05-10 19:21:33,072] Trial 2533 pruned.
[I 2021-05-10 19:21:33,732] Trial 2534 pruned.
[I 2021-05-10 19:21:34,405] Trial 2535 pruned.
[I 2021-05-10 19:21:34,783] Trial 2536 pruned.
[I 2021-05-10 19:21:36,612] Trial 2537 pruned.
[I 2021-05-10 19:21:36,984] Trial 2538 pruned.
[I 2021-05-10 19:21:37,640] Trial 2539 pruned.
[I 2021-05-10 19:21:38,313] Trial 2540 pruned.
[I 2021-05-10 19:21:38,684] Trial 2541 pruned.
[I 2021-05-10 19:21:38,839] Trial 2542 pruned.
[I 2021-05-10 19:21:39,506] Trial 2543 pruned.
[I 2021-05-10 19:21:39,884] Trial 2544 pruned.
[I 2021-05-10 19:21:40,539] Trial 2545 pruned.
[I 2021-05-10 19:21:40,898] Trial 2546 pruned.
[I 2021-05-10 19:21:41,560] Trial 2547 pruned.
[I 2021-05-10 19:21:42,232] Trial 2548 pruned.
[I 2021-05-10 19:21:42,609] Trial 2549 pruned.
[I 2021-05-10 19:21:43,274] Trial 2550 pruned.
[I 2021-05-10 19:21:43,640] Trial 2551 pruned.
[I 2021-05-10 19:21:45,469] Trial 2552 pruned.
[I 2021-05-10 19:21:45,846] Trial 2553 pruned.
[I 2021-05-10 19:21:46,078] Trial 2554 pruned.
[I 2021-05-10 19:21:46,744] Trial 2555 pruned.
[I 2021-05-10 19:21:46,903] Trial 2556 pruned.
[I 2021-05-10 19:21:47,569] Trial 2557 pruned.
[I 2021-05-10 19:21:47,935] Trial 2558 pruned.
[I 2021-05-10 19:21:48,584] Trial 2559 pruned.
[I 2021-05-10 19:21:48,957] Trial 2560 pruned.
[I 2021-05-10 19:21:49,504] Trial 2561 pruned.
[I 2021-05-10 19:22:20,112] Trial 2562 finished with value: 167.32757568359375 and parameters: {'lr': 0.003397594768703825, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 1006, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:22:20,792] Trial 2563 pruned.
[I 2021-05-10 19:22:21,438] Trial 2564 pruned.
[I 2021-05-10 19:22:21,811] Trial 2565 pruned.
[I 2021-05-10 19:22:22,470] Trial 2566 pruned.
[I 2021-05-10 19:22:22,847] Trial 2567 pruned.
[I 2021-05-10 19:22:23,518] Trial 2568 pruned.
[I 2021-05-10 19:22:24,202] Trial 2569 pruned.
[I 2021-05-10 19:22:24,578] Trial 2570 pruned.
[I 2021-05-10 19:22:24,739] Trial 2571 pruned.
[I 2021-05-10 19:22:25,396] Trial 2572 pruned.
[I 2021-05-10 19:22:25,760] Trial 2573 pruned.
[I 2021-05-10 19:22:26,428] Trial 2574 pruned.
[I 2021-05-10 19:22:26,800] Trial 2575 pruned.
[I 2021-05-10 19:22:27,456] Trial 2576 pruned.
[I 2021-05-10 19:22:28,119] Trial 2577 pruned.
[I 2021-05-10 19:22:28,492] Trial 2578 pruned.
[I 2021-05-10 19:22:29,156] Trial 2579 pruned.
[I 2021-05-10 19:22:29,538] Trial 2580 pruned.
[I 2021-05-10 19:22:30,191] Trial 2581 pruned.
[I 2021-05-10 19:22:30,580] Trial 2582 pruned.
[I 2021-05-10 19:22:31,292] Trial 2583 pruned.
[I 2021-05-10 19:22:31,545] Trial 2584 pruned.
[I 2021-05-10 19:22:32,243] Trial 2585 pruned.
[I 2021-05-10 19:22:32,635] Trial 2586 pruned.
[I 2021-05-10 19:22:32,803] Trial 2587 pruned.
[I 2021-05-10 19:22:33,492] Trial 2588 pruned.
[I 2021-05-10 19:22:33,890] Trial 2589 pruned.
[I 2021-05-10 19:22:34,596] Trial 2590 pruned.
[I 2021-05-10 19:22:34,995] Trial 2591 pruned.
[I 2021-05-10 19:22:35,688] Trial 2592 pruned.
[I 2021-05-10 19:22:36,390] Trial 2593 pruned.
[I 2021-05-10 19:22:36,789] Trial 2594 pruned.
[I 2021-05-10 19:22:37,479] Trial 2595 pruned.
[I 2021-05-10 19:22:37,876] Trial 2596 pruned.
[I 2021-05-10 19:22:38,462] Trial 2597 pruned.
[I 2021-05-10 19:22:39,147] Trial 2598 pruned.
[I 2021-05-10 19:22:39,551] Trial 2599 pruned.
[I 2021-05-10 19:22:39,723] Trial 2600 pruned.
[I 2021-05-10 19:22:40,431] Trial 2601 pruned.
[I 2021-05-10 19:22:40,828] Trial 2602 pruned.
[I 2021-05-10 19:22:41,515] Trial 2603 pruned.
[I 2021-05-10 19:22:41,915] Trial 2604 pruned.
[I 2021-05-10 19:22:42,615] Trial 2605 pruned.
[I 2021-05-10 19:22:43,321] Trial 2606 pruned.
[I 2021-05-10 19:22:43,723] Trial 2607 pruned.
[I 2021-05-10 19:22:44,432] Trial 2608 pruned.
[I 2021-05-10 19:22:44,835] Trial 2609 pruned.
[I 2021-05-10 19:22:45,526] Trial 2610 pruned.
[I 2021-05-10 19:22:45,923] Trial 2611 pruned.
[I 2021-05-10 19:22:46,629] Trial 2612 pruned.
[I 2021-05-10 19:22:46,873] Trial 2613 pruned.
[I 2021-05-10 19:22:47,568] Trial 2614 pruned.
[I 2021-05-10 19:22:47,744] Trial 2615 pruned.
[I 2021-05-10 19:22:48,152] Trial 2616 pruned.
[I 2021-05-10 19:22:48,641] Trial 2617 pruned.
[I 2021-05-10 19:22:49,043] Trial 2618 pruned.
[I 2021-05-10 19:22:49,740] Trial 2619 pruned.
[I 2021-05-10 19:22:50,141] Trial 2620 pruned.
[I 2021-05-10 19:22:50,836] Trial 2621 pruned.
[I 2021-05-10 19:22:51,536] Trial 2622 pruned.
[I 2021-05-10 19:22:51,929] Trial 2623 pruned.
[I 2021-05-10 19:22:53,286] Trial 2624 pruned.
[I 2021-05-10 19:22:53,698] Trial 2625 pruned.
[I 2021-05-10 19:22:54,408] Trial 2626 pruned.
[I 2021-05-10 19:22:55,112] Trial 2627 pruned.
[I 2021-05-10 19:22:55,510] Trial 2628 pruned.
[I 2021-05-10 19:22:55,679] Trial 2629 pruned.
[I 2021-05-10 19:22:56,390] Trial 2630 pruned.
[I 2021-05-10 19:22:56,789] Trial 2631 pruned.
[I 2021-05-10 19:22:57,512] Trial 2632 pruned.
[I 2021-05-10 19:22:57,907] Trial 2633 pruned.
[I 2021-05-10 19:22:58,613] Trial 2634 pruned.
[I 2021-05-10 19:22:59,324] Trial 2635 pruned.
[I 2021-05-10 19:22:59,727] Trial 2636 pruned.
[I 2021-05-10 19:23:00,436] Trial 2637 pruned.
[I 2021-05-10 19:23:00,862] Trial 2638 pruned.
[I 2021-05-10 19:23:01,571] Trial 2639 pruned.
[I 2021-05-10 19:23:01,962] Trial 2640 pruned.
[I 2021-05-10 19:23:02,673] Trial 2641 pruned.
[I 2021-05-10 19:23:02,916] Trial 2642 pruned.
[I 2021-05-10 19:23:03,602] Trial 2643 pruned.
[I 2021-05-10 19:23:03,738] Trial 2644 pruned.
[I 2021-05-10 19:23:04,144] Trial 2645 pruned.
[I 2021-05-10 19:23:04,856] Trial 2646 pruned.
[I 2021-05-10 19:23:05,246] Trial 2647 pruned.
[I 2021-05-10 19:24:04,085] Trial 2648 finished with value: 164.8304443359375 and parameters: {'lr': 0.003173028605138649, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 696, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'relu', 'HL4_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:24:04,459] Trial 2649 pruned.
[I 2021-05-10 19:24:05,129] Trial 2650 pruned.
[I 2021-05-10 19:24:05,799] Trial 2651 pruned.
[I 2021-05-10 19:24:06,183] Trial 2652 pruned.
[I 2021-05-10 19:24:06,853] Trial 2653 pruned.
[I 2021-05-10 19:24:07,228] Trial 2654 pruned.
[I 2021-05-10 19:24:07,894] Trial 2655 pruned.
[I 2021-05-10 19:24:08,555] Trial 2656 pruned.
[I 2021-05-10 19:24:08,923] Trial 2657 pruned.
[I 2021-05-10 19:24:09,086] Trial 2658 pruned.
[I 2021-05-10 19:24:10,345] Trial 2659 pruned.
[I 2021-05-10 19:24:10,717] Trial 2660 pruned.
[I 2021-05-10 19:24:16,005] Trial 2661 pruned.
[I 2021-05-10 19:24:16,388] Trial 2662 pruned.
[I 2021-05-10 19:24:17,049] Trial 2663 pruned.
[I 2021-05-10 19:25:16,274] Trial 2664 finished with value: 160.87799072265625 and parameters: {'lr': 0.0016796871682017648, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 990, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'relu'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:25:16,655] Trial 2665 pruned.
[I 2021-05-10 19:25:17,338] Trial 2666 pruned.
[I 2021-05-10 19:25:17,716] Trial 2667 pruned.
[I 2021-05-10 19:25:21,923] Trial 2668 pruned.
[I 2021-05-10 19:25:22,476] Trial 2669 pruned.
[I 2021-05-10 19:25:22,854] Trial 2670 pruned.
[I 2021-05-10 19:25:23,525] Trial 2671 pruned.
[I 2021-05-10 19:25:23,763] Trial 2672 pruned.
[I 2021-05-10 19:25:23,933] Trial 2673 pruned.
[I 2021-05-10 19:25:24,314] Trial 2674 pruned.
[I 2021-05-10 19:25:24,989] Trial 2675 pruned.
[I 2021-05-10 19:25:25,375] Trial 2676 pruned.
[I 2021-05-10 19:25:26,041] Trial 2677 pruned.
[I 2021-05-10 19:25:26,716] Trial 2678 pruned.
[I 2021-05-10 19:25:27,100] Trial 2679 pruned.
[I 2021-05-10 19:25:27,760] Trial 2680 pruned.
[I 2021-05-10 19:25:28,141] Trial 2681 pruned.
[I 2021-05-10 19:25:28,811] Trial 2682 pruned.
[I 2021-05-10 19:25:29,195] Trial 2683 pruned.
[I 2021-05-10 19:25:29,861] Trial 2684 pruned.
[I 2021-05-10 19:25:30,538] Trial 2685 pruned.
[I 2021-05-10 19:25:30,918] Trial 2686 pruned.
[I 2021-05-10 19:25:31,087] Trial 2687 pruned.
[I 2021-05-10 19:25:31,755] Trial 2688 pruned.
[I 2021-05-10 19:25:32,133] Trial 2689 pruned.
[I 2021-05-10 19:25:32,809] Trial 2690 pruned.
[I 2021-05-10 19:25:33,472] Trial 2691 pruned.
[I 2021-05-10 19:25:33,848] Trial 2692 pruned.
[I 2021-05-10 19:25:34,531] Trial 2693 pruned.
[I 2021-05-10 19:25:34,917] Trial 2694 pruned.
[I 2021-05-10 19:25:35,588] Trial 2695 pruned.
[I 2021-05-10 19:25:35,972] Trial 2696 pruned.
[I 2021-05-10 19:25:36,630] Trial 2697 pruned.
[I 2021-05-10 19:25:37,299] Trial 2698 pruned.
[I 2021-05-10 19:25:37,681] Trial 2699 pruned.
[I 2021-05-10 19:25:38,355] Trial 2700 pruned.
[I 2021-05-10 19:25:38,589] Trial 2701 pruned.
[I 2021-05-10 19:25:38,757] Trial 2702 pruned.
[I 2021-05-10 19:25:39,134] Trial 2703 pruned.
[I 2021-05-10 19:25:39,813] Trial 2704 pruned.
[I 2021-05-10 19:25:40,191] Trial 2705 pruned.
[I 2021-05-10 19:25:40,879] Trial 2706 pruned.
[I 2021-05-10 19:25:41,390] Trial 2707 pruned.
[I 2021-05-10 19:25:41,765] Trial 2708 pruned.
[I 2021-05-10 19:25:42,436] Trial 2709 pruned.
[I 2021-05-10 19:25:42,811] Trial 2710 pruned.
[I 2021-05-10 19:25:43,488] Trial 2711 pruned.
[I 2021-05-10 19:25:43,862] Trial 2712 pruned.
[I 2021-05-10 19:25:44,546] Trial 2713 pruned.
[I 2021-05-10 19:25:45,221] Trial 2714 pruned.
[I 2021-05-10 19:25:45,596] Trial 2715 pruned.
[I 2021-05-10 19:25:46,272] Trial 2716 pruned.
[I 2021-05-10 19:25:46,439] Trial 2717 pruned.
[I 2021-05-10 19:25:46,819] Trial 2718 pruned.
[I 2021-05-10 19:25:47,487] Trial 2719 pruned.
[I 2021-05-10 19:25:48,736] Trial 2720 pruned.
[I 2021-05-10 19:25:49,109] Trial 2721 pruned.
[I 2021-05-10 19:25:49,771] Trial 2722 pruned.
[I 2021-05-10 19:25:50,163] Trial 2723 pruned.
[I 2021-05-10 19:25:50,837] Trial 2724 pruned.
[I 2021-05-10 19:25:51,215] Trial 2725 pruned.
[I 2021-05-10 19:25:51,887] Trial 2726 pruned.
[I 2021-05-10 19:25:52,550] Trial 2727 pruned.
[I 2021-05-10 19:25:52,927] Trial 2728 pruned.
[I 2021-05-10 19:25:53,168] Trial 2729 pruned.
[I 2021-05-10 19:25:53,835] Trial 2730 pruned.
[I 2021-05-10 19:25:54,004] Trial 2731 pruned.
[I 2021-05-10 19:25:54,383] Trial 2732 pruned.
[I 2021-05-10 19:25:55,061] Trial 2733 pruned.
[I 2021-05-10 19:25:55,438] Trial 2734 pruned.
[I 2021-05-10 19:25:56,112] Trial 2735 pruned.
[I 2021-05-10 19:25:56,778] Trial 2736 pruned.
[I 2021-05-10 19:25:57,152] Trial 2737 pruned.
[I 2021-05-10 19:25:57,664] Trial 2738 pruned.
[I 2021-05-10 19:25:58,039] Trial 2739 pruned.
[I 2021-05-10 19:25:58,524] Trial 2740 pruned.
[I 2021-05-10 19:25:58,906] Trial 2741 pruned.
[I 2021-05-10 19:25:59,567] Trial 2742 pruned.
[I 2021-05-10 19:26:00,240] Trial 2743 pruned.
[I 2021-05-10 19:26:00,620] Trial 2744 pruned.
[I 2021-05-10 19:26:01,299] Trial 2745 pruned.
[I 2021-05-10 19:26:01,458] Trial 2746 pruned.
[I 2021-05-10 19:26:01,837] Trial 2747 pruned.
[I 2021-05-10 19:26:02,501] Trial 2748 pruned.
[I 2021-05-10 19:26:03,181] Trial 2749 pruned.
[I 2021-05-10 19:26:03,562] Trial 2750 pruned.
[I 2021-05-10 19:26:04,226] Trial 2751 pruned.
[I 2021-05-10 19:26:04,572] Trial 2752 pruned.
[I 2021-05-10 19:26:05,239] Trial 2753 pruned.
[I 2021-05-10 19:26:05,619] Trial 2754 pruned.
[I 2021-05-10 19:26:06,282] Trial 2755 pruned.
[I 2021-05-10 19:26:06,954] Trial 2756 pruned.
[I 2021-05-10 19:26:07,326] Trial 2757 pruned.
[I 2021-05-10 19:26:08,012] Trial 2758 pruned.
[I 2021-05-10 19:26:08,349] Trial 2759 pruned.
[I 2021-05-10 19:26:08,590] Trial 2760 pruned.
[I 2021-05-10 19:26:08,753] Trial 2761 pruned.
[I 2021-05-10 19:26:09,412] Trial 2762 pruned.
[I 2021-05-10 19:26:09,790] Trial 2763 pruned.
[I 2021-05-10 19:26:10,392] Trial 2764 pruned.
[I 2021-05-10 19:26:11,060] Trial 2765 pruned.
[I 2021-05-10 19:26:11,441] Trial 2766 pruned.
[I 2021-05-10 19:26:12,114] Trial 2767 pruned.
[I 2021-05-10 19:26:12,491] Trial 2768 pruned.
[I 2021-05-10 19:26:13,146] Trial 2769 pruned.
[I 2021-05-10 19:26:13,522] Trial 2770 pruned.
[I 2021-05-10 19:26:14,191] Trial 2771 pruned.
[I 2021-05-10 19:26:14,862] Trial 2772 pruned.
[I 2021-05-10 19:26:15,208] Trial 2773 pruned.
[I 2021-05-10 19:26:15,874] Trial 2774 pruned.
[I 2021-05-10 19:26:16,039] Trial 2775 pruned.
[I 2021-05-10 19:26:16,420] Trial 2776 pruned.
[I 2021-05-10 19:26:17,074] Trial 2777 pruned.
[I 2021-05-10 19:26:17,755] Trial 2778 pruned.
[I 2021-05-10 19:26:18,136] Trial 2779 pruned.
[I 2021-05-10 19:26:18,811] Trial 2780 pruned.
[I 2021-05-10 19:26:19,149] Trial 2781 pruned.
[I 2021-05-10 19:26:19,830] Trial 2782 pruned.
[I 2021-05-10 19:26:20,206] Trial 2783 pruned.
[I 2021-05-10 19:26:20,879] Trial 2784 pruned.
[I 2021-05-10 19:26:21,478] Trial 2785 pruned.
[I 2021-05-10 19:26:21,864] Trial 2786 pruned.
[I 2021-05-10 19:26:22,529] Trial 2787 pruned.
[I 2021-05-10 19:26:22,766] Trial 2788 pruned.
[I 2021-05-10 19:26:23,147] Trial 2789 pruned.
[I 2021-05-10 19:26:23,316] Trial 2790 pruned.
[I 2021-05-10 19:26:23,976] Trial 2791 pruned.
[I 2021-05-10 19:26:24,360] Trial 2792 pruned.
[I 2021-05-10 19:26:25,043] Trial 2793 pruned.
[I 2021-05-10 19:26:25,713] Trial 2794 pruned.
[I 2021-05-10 19:26:26,010] Trial 2795 pruned.
[I 2021-05-10 19:26:26,680] Trial 2796 pruned.
[I 2021-05-10 19:26:27,275] Trial 2797 pruned.
[I 2021-05-10 19:26:27,950] Trial 2798 pruned.
[I 2021-05-10 19:26:28,329] Trial 2799 pruned.
[I 2021-05-10 19:26:29,004] Trial 2800 pruned.
[I 2021-05-10 19:26:29,661] Trial 2801 pruned.
[I 2021-05-10 19:26:30,050] Trial 2802 pruned.
[I 2021-05-10 19:26:30,652] Trial 2803 pruned.
[I 2021-05-10 19:26:31,030] Trial 2804 pruned.
[I 2021-05-10 19:26:31,707] Trial 2805 pruned.
[I 2021-05-10 19:26:32,377] Trial 2806 pruned.
[I 2021-05-10 19:26:32,757] Trial 2807 pruned.
[I 2021-05-10 19:26:33,426] Trial 2808 pruned.
[I 2021-05-10 19:26:33,767] Trial 2809 pruned.
[I 2021-05-10 19:26:33,934] Trial 2810 pruned.
[I 2021-05-10 19:26:34,594] Trial 2811 pruned.
[I 2021-05-10 19:26:34,973] Trial 2812 pruned.
[I 2021-05-10 19:26:35,647] Trial 2813 pruned.
[I 2021-05-10 19:26:36,307] Trial 2814 pruned.
[I 2021-05-10 19:26:36,690] Trial 2815 pruned.
[I 2021-05-10 19:26:36,931] Trial 2816 pruned.
[I 2021-05-10 19:26:39,559] Trial 2817 pruned.
[I 2021-05-10 19:26:39,938] Trial 2818 pruned.
[I 2021-05-10 19:26:40,109] Trial 2819 pruned.
[I 2021-05-10 19:26:40,770] Trial 2820 pruned.
[I 2021-05-10 19:26:41,150] Trial 2821 pruned.
[I 2021-05-10 19:26:41,819] Trial 2822 pruned.
[I 2021-05-10 19:26:42,486] Trial 2823 pruned.
[I 2021-05-10 19:26:42,836] Trial 2824 pruned.
[I 2021-05-10 19:26:43,501] Trial 2825 pruned.
[I 2021-05-10 19:26:43,886] Trial 2826 pruned.
[I 2021-05-10 19:26:44,562] Trial 2827 pruned.
[I 2021-05-10 19:26:44,939] Trial 2828 pruned.
[I 2021-05-10 19:26:45,623] Trial 2829 pruned.
[I 2021-05-10 19:26:46,286] Trial 2830 pruned.
[I 2021-05-10 19:26:46,631] Trial 2831 pruned.
[I 2021-05-10 19:26:47,310] Trial 2832 pruned.
[I 2021-05-10 19:26:47,445] Trial 2833 pruned.
[I 2021-05-10 19:26:47,828] Trial 2834 pruned.
[I 2021-05-10 19:26:48,502] Trial 2835 pruned.
[I 2021-05-10 19:26:49,178] Trial 2836 pruned.
[I 2021-05-10 19:26:49,560] Trial 2837 pruned.
[I 2021-05-10 19:26:51,405] Trial 2838 pruned.
[I 2021-05-10 19:26:51,786] Trial 2839 pruned.
[I 2021-05-10 19:26:52,420] Trial 2840 pruned.
[I 2021-05-10 19:26:52,798] Trial 2841 pruned.
[I 2021-05-10 19:26:53,488] Trial 2842 pruned.
[I 2021-05-10 19:26:54,168] Trial 2843 pruned.
[I 2021-05-10 19:26:54,577] Trial 2844 pruned.
[I 2021-05-10 19:26:54,816] Trial 2845 pruned.
[I 2021-05-10 19:26:55,416] Trial 2846 pruned.
[I 2021-05-10 19:26:55,790] Trial 2847 pruned.
[I 2021-05-10 19:26:56,492] Trial 2848 pruned.
[I 2021-05-10 19:26:56,683] Trial 2849 pruned.
[I 2021-05-10 19:26:57,084] Trial 2850 pruned.
[I 2021-05-10 19:27:59,458] Trial 2851 finished with value: 173.693603515625 and parameters: {'lr': 0.001931293372539029, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 904, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:28:00,155] Trial 2852 pruned.
[I 2021-05-10 19:28:00,527] Trial 2853 pruned.
[I 2021-05-10 19:28:01,225] Trial 2854 pruned.
[I 2021-05-10 19:28:01,636] Trial 2855 pruned.
[I 2021-05-10 19:28:02,346] Trial 2856 pruned.
[I 2021-05-10 19:28:02,757] Trial 2857 pruned.
[I 2021-05-10 19:28:03,456] Trial 2858 pruned.
[I 2021-05-10 19:28:04,185] Trial 2859 pruned.
[I 2021-05-10 19:28:04,542] Trial 2860 pruned.
[I 2021-05-10 19:28:05,262] Trial 2861 pruned.
[I 2021-05-10 19:28:05,442] Trial 2862 pruned.
[I 2021-05-10 19:28:05,843] Trial 2863 pruned.
[I 2021-05-10 19:28:06,359] Trial 2864 pruned.
[I 2021-05-10 19:28:07,054] Trial 2865 pruned.
[I 2021-05-10 19:28:07,454] Trial 2866 pruned.
[I 2021-05-10 19:28:08,180] Trial 2867 pruned.
[I 2021-05-10 19:28:08,583] Trial 2868 pruned.
[I 2021-05-10 19:28:09,213] Trial 2869 pruned.
[I 2021-05-10 19:28:09,626] Trial 2870 pruned.
[I 2021-05-10 19:28:10,345] Trial 2871 pruned.
[I 2021-05-10 19:28:11,050] Trial 2872 pruned.
[I 2021-05-10 19:28:11,462] Trial 2873 pruned.
[I 2021-05-10 19:28:12,187] Trial 2874 pruned.
[I 2021-05-10 19:28:12,498] Trial 2875 pruned.
[I 2021-05-10 19:28:12,727] Trial 2876 pruned.
[I 2021-05-10 19:28:12,905] Trial 2877 pruned.
[I 2021-05-10 19:28:13,629] Trial 2878 pruned.
[I 2021-05-10 19:28:14,044] Trial 2879 pruned.
[I 2021-05-10 19:28:15,345] Trial 2880 pruned.
[I 2021-05-10 19:28:15,977] Trial 2881 pruned.
[I 2021-05-10 19:28:16,386] Trial 2882 pruned.
[I 2021-05-10 19:28:17,091] Trial 2883 pruned.
[I 2021-05-10 19:28:17,502] Trial 2884 pruned.
[I 2021-05-10 19:28:18,223] Trial 2885 pruned.
[I 2021-05-10 19:28:18,636] Trial 2886 pruned.
[I 2021-05-10 19:28:19,343] Trial 2887 pruned.
[I 2021-05-10 19:28:20,069] Trial 2888 pruned.
[I 2021-05-10 19:28:20,443] Trial 2889 pruned.
[I 2021-05-10 19:28:21,144] Trial 2890 pruned.
[I 2021-05-10 19:28:21,329] Trial 2891 pruned.
[I 2021-05-10 19:28:21,734] Trial 2892 pruned.
[I 2021-05-10 19:28:22,439] Trial 2893 pruned.
[I 2021-05-10 19:28:23,162] Trial 2894 pruned.
[I 2021-05-10 19:28:23,564] Trial 2895 pruned.
[I 2021-05-10 19:28:24,292] Trial 2896 pruned.
[I 2021-05-10 19:28:24,697] Trial 2897 pruned.
[I 2021-05-10 19:28:25,354] Trial 2898 pruned.
[I 2021-05-10 19:28:25,754] Trial 2899 pruned.
[I 2021-05-10 19:28:26,481] Trial 2900 pruned.
[I 2021-05-10 19:28:27,181] Trial 2901 pruned.
[I 2021-05-10 19:28:27,591] Trial 2902 pruned.
[I 2021-05-10 19:28:28,318] Trial 2903 pruned.
[I 2021-05-10 19:28:28,722] Trial 2904 pruned.
[I 2021-05-10 19:28:28,909] Trial 2905 pruned.
[I 2021-05-10 19:28:29,552] Trial 2906 pruned.
[I 2021-05-10 19:28:29,967] Trial 2907 pruned.
[I 2021-05-10 19:28:30,695] Trial 2908 pruned.
[I 2021-05-10 19:28:30,946] Trial 2909 pruned.
[I 2021-05-10 19:28:31,650] Trial 2910 pruned.
[I 2021-05-10 19:28:32,065] Trial 2911 pruned.
[I 2021-05-10 19:28:32,686] Trial 2912 pruned.
[I 2021-05-10 19:28:33,088] Trial 2913 pruned.
[I 2021-05-10 19:28:33,825] Trial 2914 pruned.
[I 2021-05-10 19:28:34,237] Trial 2915 pruned.
[I 2021-05-10 19:28:34,942] Trial 2916 pruned.
[I 2021-05-10 19:28:36,850] Trial 2917 pruned.
[I 2021-05-10 19:28:37,191] Trial 2918 pruned.
[I 2021-05-10 19:28:37,900] Trial 2919 pruned.
[I 2021-05-10 19:28:38,070] Trial 2920 pruned.
[I 2021-05-10 19:28:38,483] Trial 2921 pruned.
[I 2021-05-10 19:28:39,210] Trial 2922 pruned.
[I 2021-05-10 19:28:39,915] Trial 2923 pruned.
[I 2021-05-10 19:28:40,331] Trial 2924 pruned.
[I 2021-05-10 19:28:41,055] Trial 2925 pruned.
[I 2021-05-10 19:28:41,438] Trial 2926 pruned.
[I 2021-05-10 19:28:42,033] Trial 2927 pruned.
[I 2021-05-10 19:28:42,423] Trial 2928 pruned.
[I 2021-05-10 19:28:43,087] Trial 2929 pruned.
[I 2021-05-10 19:28:43,777] Trial 2930 pruned.
[I 2021-05-10 19:28:44,169] Trial 2931 pruned.
[I 2021-05-10 19:28:44,859] Trial 2932 pruned.
[I 2021-05-10 19:28:45,101] Trial 2933 pruned.
[I 2021-05-10 19:28:45,439] Trial 2934 pruned.
[I 2021-05-10 19:28:45,614] Trial 2935 pruned.
[I 2021-05-10 19:28:46,282] Trial 2936 pruned.
[I 2021-05-10 19:28:46,667] Trial 2937 pruned.
[I 2021-05-10 19:28:47,337] Trial 2938 pruned.
[I 2021-05-10 19:28:48,016] Trial 2939 pruned.
[I 2021-05-10 19:28:48,403] Trial 2940 pruned.
[I 2021-05-10 19:28:49,037] Trial 2941 pruned.
[I 2021-05-10 19:28:49,428] Trial 2942 pruned.
[I 2021-05-10 19:28:50,099] Trial 2943 pruned.
[I 2021-05-10 19:28:50,488] Trial 2944 pruned.
[I 2021-05-10 19:28:51,176] Trial 2945 pruned.
[I 2021-05-10 19:28:51,842] Trial 2946 pruned.
[I 2021-05-10 19:28:52,227] Trial 2947 pruned.
[I 2021-05-10 19:28:53,384] Trial 2948 pruned.
[I 2021-05-10 19:28:53,562] Trial 2949 pruned.
[I 2021-05-10 19:28:53,950] Trial 2950 pruned.
[I 2021-05-10 19:28:54,636] Trial 2951 pruned.
[I 2021-05-10 19:28:55,308] Trial 2952 pruned.
[I 2021-05-10 19:28:55,697] Trial 2953 pruned.
[I 2021-05-10 19:28:56,361] Trial 2954 pruned.
[I 2021-05-10 19:28:56,708] Trial 2955 pruned.
[I 2021-05-10 19:28:57,381] Trial 2956 pruned.
[I 2021-05-10 19:28:57,759] Trial 2957 pruned.
[I 2021-05-10 19:28:58,282] Trial 2958 pruned.
[I 2021-05-10 19:28:58,956] Trial 2959 pruned.
[I 2021-05-10 19:28:59,362] Trial 2960 pruned.
[I 2021-05-10 19:29:00,052] Trial 2961 pruned.
[I 2021-05-10 19:29:00,299] Trial 2962 pruned.
[I 2021-05-10 19:29:00,673] Trial 2963 pruned.
[I 2021-05-10 19:29:00,850] Trial 2964 pruned.
[I 2021-05-10 19:29:01,549] Trial 2965 pruned.
[I 2021-05-10 19:29:01,932] Trial 2966 pruned.
[I 2021-05-10 19:29:04,961] Trial 2967 pruned.
[I 2021-05-10 19:29:05,629] Trial 2968 pruned.
[I 2021-05-10 19:29:06,015] Trial 2969 pruned.
[I 2021-05-10 19:29:06,683] Trial 2970 pruned.
[I 2021-05-10 19:29:07,063] Trial 2971 pruned.
[I 2021-05-10 19:29:07,659] Trial 2972 pruned.
[I 2021-05-10 19:29:08,047] Trial 2973 pruned.
[I 2021-05-10 19:29:12,219] Trial 2974 pruned.
[I 2021-05-10 19:29:12,900] Trial 2975 pruned.
[I 2021-05-10 19:29:13,278] Trial 2976 pruned.
[I 2021-05-10 19:29:13,949] Trial 2977 pruned.
[I 2021-05-10 19:29:14,122] Trial 2978 pruned.
[I 2021-05-10 19:29:14,500] Trial 2979 pruned.
[I 2021-05-10 19:29:15,097] Trial 2980 pruned.
[I 2021-05-10 19:29:15,763] Trial 2981 pruned.
[I 2021-05-10 19:29:16,150] Trial 2982 pruned.
[I 2021-05-10 19:29:16,833] Trial 2983 pruned.
[I 2021-05-10 19:29:17,220] Trial 2984 pruned.
[I 2021-05-10 19:29:17,829] Trial 2985 pruned.
[I 2021-05-10 19:29:18,228] Trial 2986 pruned.
[I 2021-05-10 19:29:18,898] Trial 2987 pruned.
[I 2021-05-10 19:29:19,587] Trial 2988 pruned.
[I 2021-05-10 19:29:19,979] Trial 2989 pruned.
[I 2021-05-10 19:29:20,644] Trial 2990 pruned.
[I 2021-05-10 19:29:20,859] Trial 2991 pruned.
[I 2021-05-10 19:29:21,251] Trial 2992 pruned.
[I 2021-05-10 19:29:21,421] Trial 2993 pruned.
[I 2021-05-10 19:29:22,094] Trial 2994 pruned.
[I 2021-05-10 19:29:22,481] Trial 2995 pruned.
[I 2021-05-10 19:29:23,168] Trial 2996 pruned.
[I 2021-05-10 19:29:23,847] Trial 2997 pruned.
[I 2021-05-10 19:29:24,232] Trial 2998 pruned.
[I 2021-05-10 19:29:24,905] Trial 2999 pruned.
[I 2021-05-10 19:29:25,260] Trial 3000 pruned.
[I 2021-05-10 19:29:25,920] Trial 3001 pruned.
[I 2021-05-10 19:29:26,310] Trial 3002 pruned.
[I 2021-05-10 19:29:26,982] Trial 3003 pruned.
[I 2021-05-10 19:29:27,645] Trial 3004 pruned.
[I 2021-05-10 19:29:28,042] Trial 3005 pruned.
[I 2021-05-10 19:29:28,713] Trial 3006 pruned.
[I 2021-05-10 19:29:29,067] Trial 3007 pruned.
[I 2021-05-10 19:29:29,721] Trial 3008 pruned.
[I 2021-05-10 19:29:29,893] Trial 3009 pruned.
[I 2021-05-10 19:29:30,564] Trial 3010 pruned.
[I 2021-05-10 19:30:00,255] Trial 3011 finished with value: 163.05223083496094 and parameters: {'lr': 0.0027227940856227647, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 1008, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:30:00,892] Trial 3012 pruned.
[I 2021-05-10 19:30:01,287] Trial 3013 pruned.
[I 2021-05-10 19:30:01,944] Trial 3014 pruned.
[I 2021-05-10 19:30:02,263] Trial 3015 pruned.
[I 2021-05-10 19:30:02,943] Trial 3016 pruned.
[I 2021-05-10 19:30:03,611] Trial 3017 pruned.
[I 2021-05-10 19:30:03,998] Trial 3018 pruned.
[I 2021-05-10 19:30:04,676] Trial 3019 pruned.
[I 2021-05-10 19:30:05,064] Trial 3020 pruned.
[I 2021-05-10 19:30:05,311] Trial 3021 pruned.
[I 2021-05-10 19:30:05,467] Trial 3022 pruned.
[I 2021-05-10 19:30:07,311] Trial 3023 pruned.
[I 2021-05-10 19:30:07,701] Trial 3024 pruned.
[I 2021-05-10 19:30:08,373] Trial 3025 pruned.
[I 2021-05-10 19:30:09,059] Trial 3026 pruned.
[I 2021-05-10 19:30:09,444] Trial 3027 pruned.
[I 2021-05-10 19:30:10,115] Trial 3028 pruned.
[I 2021-05-10 19:30:10,522] Trial 3029 pruned.
[I 2021-05-10 19:30:11,120] Trial 3030 pruned.
[I 2021-05-10 19:30:11,512] Trial 3031 pruned.
[I 2021-05-10 19:30:12,205] Trial 3032 pruned.
[I 2021-05-10 19:30:12,873] Trial 3033 pruned.
[I 2021-05-10 19:30:13,260] Trial 3034 pruned.
[I 2021-05-10 19:30:13,919] Trial 3035 pruned.
[I 2021-05-10 19:30:14,094] Trial 3036 pruned.
[I 2021-05-10 19:30:14,431] Trial 3037 pruned.
[I 2021-05-10 19:30:15,118] Trial 3038 pruned.
[I 2021-05-10 19:30:15,806] Trial 3039 pruned.
[I 2021-05-10 19:30:16,059] Trial 3040 pruned.
[I 2021-05-10 19:30:16,726] Trial 3041 pruned.
[I 2021-05-10 19:30:17,112] Trial 3042 pruned.
[I 2021-05-10 19:30:17,729] Trial 3043 pruned.
[I 2021-05-10 19:30:18,125] Trial 3044 pruned.
[I 2021-05-10 19:30:18,804] Trial 3045 pruned.
[I 2021-05-10 19:30:20,059] Trial 3046 pruned.
[I 2021-05-10 19:30:20,442] Trial 3047 pruned.
[I 2021-05-10 19:30:21,111] Trial 3048 pruned.
[I 2021-05-10 19:30:21,357] Trial 3049 pruned.
[I 2021-05-10 19:30:21,744] Trial 3050 pruned.
[I 2021-05-10 19:30:21,921] Trial 3051 pruned.
[I 2021-05-10 19:30:22,517] Trial 3052 pruned.
[I 2021-05-10 19:30:22,838] Trial 3053 pruned.
[I 2021-05-10 19:30:23,519] Trial 3054 pruned.
[I 2021-05-10 19:30:24,188] Trial 3055 pruned.
[I 2021-05-10 19:30:24,575] Trial 3056 pruned.
[I 2021-05-10 19:30:25,253] Trial 3057 pruned.
[I 2021-05-10 19:30:25,639] Trial 3058 pruned.
[I 2021-05-10 19:30:26,318] Trial 3059 pruned.
[I 2021-05-10 19:30:26,681] Trial 3060 pruned.
[I 2021-05-10 19:30:27,355] Trial 3061 pruned.
[I 2021-05-10 19:30:28,031] Trial 3062 pruned.
[I 2021-05-10 19:30:28,414] Trial 3063 pruned.
[I 2021-05-10 19:30:30,248] Trial 3064 pruned.
[I 2021-05-10 19:30:30,411] Trial 3065 pruned.
[I 2021-05-10 19:30:30,800] Trial 3066 pruned.
[I 2021-05-10 19:30:31,486] Trial 3067 pruned.
[I 2021-05-10 19:31:30,566] Trial 3068 finished with value: 165.427978515625 and parameters: {'lr': 0.0015677357942574114, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 956, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:31:30,970] Trial 3069 pruned.
[I 2021-05-10 19:31:31,676] Trial 3070 pruned.
[I 2021-05-10 19:31:32,100] Trial 3071 pruned.
[I 2021-05-10 19:31:32,800] Trial 3072 pruned.
[I 2021-05-10 19:31:33,205] Trial 3073 pruned.
[I 2021-05-10 19:31:33,862] Trial 3074 pruned.
[I 2021-05-10 19:31:34,563] Trial 3075 pruned.
[I 2021-05-10 19:32:04,936] Trial 3076 finished with value: 164.9753875732422 and parameters: {'lr': 0.0028669927575469324, 'batch_size': 32, 'n_layers': 5, 'neurons_HL1': 1006, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:32:05,627] Trial 3077 pruned.
[I 2021-05-10 19:32:06,021] Trial 3078 pruned.
[I 2021-05-10 19:32:06,199] Trial 3079 pruned.
[I 2021-05-10 19:32:06,449] Trial 3080 pruned.
[I 2021-05-10 19:32:07,063] Trial 3081 pruned.
[I 2021-05-10 19:32:07,444] Trial 3082 pruned.
[I 2021-05-10 19:32:08,144] Trial 3083 pruned.
[I 2021-05-10 19:32:08,889] Trial 3084 pruned.
[I 2021-05-10 19:32:09,302] Trial 3085 pruned.
[I 2021-05-10 19:32:10,010] Trial 3086 pruned.
[I 2021-05-10 19:32:10,429] Trial 3087 pruned.
[I 2021-05-10 19:32:11,071] Trial 3088 pruned.
[I 2021-05-10 19:32:11,485] Trial 3089 pruned.
[I 2021-05-10 19:32:12,182] Trial 3090 pruned.
[I 2021-05-10 19:32:12,911] Trial 3091 pruned.
[I 2021-05-10 19:32:13,329] Trial 3092 pruned.
[I 2021-05-10 19:32:14,643] Trial 3093 pruned.
[I 2021-05-10 19:32:14,827] Trial 3094 pruned.
[I 2021-05-10 19:32:15,143] Trial 3095 pruned.
[I 2021-05-10 19:32:15,807] Trial 3096 pruned.
[I 2021-05-10 19:32:16,523] Trial 3097 pruned.
[I 2021-05-10 19:32:16,934] Trial 3098 pruned.
[I 2021-05-10 19:32:20,812] Trial 3099 pruned.
[I 2021-05-10 19:32:21,233] Trial 3100 pruned.
[I 2021-05-10 19:32:21,960] Trial 3101 pruned.
[I 2021-05-10 19:32:22,361] Trial 3102 pruned.
[I 2021-05-10 19:32:22,982] Trial 3103 pruned.
[I 2021-05-10 19:32:23,716] Trial 3104 pruned.
[I 2021-05-10 19:32:24,122] Trial 3105 pruned.
[I 2021-05-10 19:32:24,824] Trial 3106 pruned.
[I 2021-05-10 19:32:25,244] Trial 3107 pruned.
[I 2021-05-10 19:32:25,507] Trial 3108 pruned.
[I 2021-05-10 19:32:25,697] Trial 3109 pruned.
[I 2021-05-10 19:32:26,424] Trial 3110 pruned.
[I 2021-05-10 19:32:26,790] Trial 3111 pruned.
[I 2021-05-10 19:32:27,519] Trial 3112 pruned.
[I 2021-05-10 19:32:29,441] Trial 3113 pruned.
[I 2021-05-10 19:32:29,850] Trial 3114 pruned.
[I 2021-05-10 19:32:34,165] Trial 3115 pruned.
[I 2021-05-10 19:32:34,558] Trial 3116 pruned.
[I 2021-05-10 19:32:35,170] Trial 3117 pruned.
[I 2021-05-10 19:32:35,570] Trial 3118 pruned.
[I 2021-05-10 19:32:36,258] Trial 3119 pruned.
[I 2021-05-10 19:32:36,940] Trial 3120 pruned.
[I 2021-05-10 19:32:37,331] Trial 3121 pruned.
[I 2021-05-10 19:33:39,354] Trial 3122 finished with value: 162.05116271972656 and parameters: {'lr': 0.0030527499564237267, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 1006, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'relu', 'HL4_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:33:40,090] Trial 3123 pruned.
[I 2021-05-10 19:33:40,269] Trial 3124 pruned.
[I 2021-05-10 19:33:40,694] Trial 3125 pruned.
[I 2021-05-10 19:33:41,419] Trial 3126 pruned.
[I 2021-05-10 19:33:41,827] Trial 3127 pruned.
[I 2021-05-10 19:33:42,568] Trial 3128 pruned.
[I 2021-05-10 19:33:42,974] Trial 3129 pruned.
[I 2021-05-10 19:33:43,690] Trial 3130 pruned.
[I 2021-05-10 19:33:44,396] Trial 3131 pruned.
[I 2021-05-10 19:33:44,773] Trial 3132 pruned.
[I 2021-05-10 19:33:45,507] Trial 3133 pruned.
[I 2021-05-10 19:33:45,919] Trial 3134 pruned.
[I 2021-05-10 19:33:46,649] Trial 3135 pruned.
[I 2021-05-10 19:33:47,071] Trial 3136 pruned.
[I 2021-05-10 19:33:47,805] Trial 3137 pruned.
[I 2021-05-10 19:33:48,069] Trial 3138 pruned.
[I 2021-05-10 19:33:48,733] Trial 3139 pruned.
[I 2021-05-10 19:33:48,923] Trial 3140 pruned.
[I 2021-05-10 19:33:49,331] Trial 3141 pruned.
[I 2021-05-10 19:33:50,066] Trial 3142 pruned.
[I 2021-05-10 19:33:50,475] Trial 3143 pruned.
[I 2021-05-10 19:33:51,188] Trial 3144 pruned.
[I 2021-05-10 19:33:51,861] Trial 3145 pruned.
[I 2021-05-10 19:33:52,281] Trial 3146 pruned.
[I 2021-05-10 19:33:52,995] Trial 3147 pruned.
[I 2021-05-10 19:33:53,314] Trial 3148 pruned.
[I 2021-05-10 19:33:54,050] Trial 3149 pruned.
[I 2021-05-10 19:33:54,477] Trial 3150 pruned.
[I 2021-05-10 19:33:55,203] Trial 3151 pruned.
[I 2021-05-10 19:33:55,375] Trial 3152 pruned.
[I 2021-05-10 19:33:56,090] Trial 3153 pruned.
[I 2021-05-10 19:33:56,507] Trial 3154 pruned.
[I 2021-05-10 19:33:57,243] Trial 3155 pruned.
[I 2021-05-10 19:33:57,665] Trial 3156 pruned.
[I 2021-05-10 19:33:58,397] Trial 3157 pruned.
[I 2021-05-10 19:33:58,809] Trial 3158 pruned.
[I 2021-05-10 19:33:59,539] Trial 3159 pruned.
[I 2021-05-10 19:34:00,178] Trial 3160 pruned.
[I 2021-05-10 19:34:00,597] Trial 3161 pruned.
[I 2021-05-10 19:34:01,333] Trial 3162 pruned.
[I 2021-05-10 19:34:01,757] Trial 3163 pruned.
[I 2021-05-10 19:34:02,475] Trial 3164 pruned.
[I 2021-05-10 19:34:02,900] Trial 3165 pruned.
[I 2021-05-10 19:34:03,626] Trial 3166 pruned.
[I 2021-05-10 19:34:03,826] Trial 3167 pruned.
[I 2021-05-10 19:34:04,094] Trial 3168 pruned.
[I 2021-05-10 19:34:04,823] Trial 3169 pruned.
[I 2021-05-10 19:34:05,194] Trial 3170 pruned.
[I 2021-05-10 19:34:05,920] Trial 3171 pruned.
[I 2021-05-10 19:34:06,343] Trial 3172 pruned.
[I 2021-05-10 19:34:07,063] Trial 3173 pruned.
[I 2021-05-10 19:34:07,781] Trial 3174 pruned.
[I 2021-05-10 19:34:08,158] Trial 3175 pruned.
[I 2021-05-10 19:34:08,895] Trial 3176 pruned.
[I 2021-05-10 19:34:09,311] Trial 3177 pruned.
[I 2021-05-10 19:34:10,049] Trial 3178 pruned.
[I 2021-05-10 19:34:10,468] Trial 3179 pruned.
[I 2021-05-10 19:34:11,202] Trial 3180 pruned.
[I 2021-05-10 19:34:11,390] Trial 3181 pruned.
[I 2021-05-10 19:34:12,046] Trial 3182 pruned.
[I 2021-05-10 19:34:12,462] Trial 3183 pruned.
[I 2021-05-10 19:34:13,199] Trial 3184 pruned.
[I 2021-05-10 19:34:13,620] Trial 3185 pruned.
[I 2021-05-10 19:34:14,358] Trial 3186 pruned.
[I 2021-05-10 19:34:14,766] Trial 3187 pruned.
[I 2021-05-10 19:34:15,486] Trial 3188 pruned.
[I 2021-05-10 19:34:16,213] Trial 3189 pruned.
[I 2021-05-10 19:34:16,597] Trial 3190 pruned.
[I 2021-05-10 19:34:17,200] Trial 3191 pruned.
[I 2021-05-10 19:34:17,664] Trial 3192 pruned.
[I 2021-05-10 19:34:18,406] Trial 3193 pruned.
[I 2021-05-10 19:34:18,818] Trial 3194 pruned.
[I 2021-05-10 19:34:19,452] Trial 3195 pruned.
[I 2021-05-10 19:34:20,169] Trial 3196 pruned.
[I 2021-05-10 19:34:20,361] Trial 3197 pruned.
[I 2021-05-10 19:34:20,630] Trial 3198 pruned.
[I 2021-05-10 19:34:21,051] Trial 3199 pruned.
[I 2021-05-10 19:34:21,766] Trial 3200 pruned.
[I 2021-05-10 19:34:22,192] Trial 3201 pruned.
[I 2021-05-10 19:34:22,936] Trial 3202 pruned.
[I 2021-05-10 19:34:23,645] Trial 3203 pruned.
[I 2021-05-10 19:34:24,053] Trial 3204 pruned.
[I 2021-05-10 19:34:24,767] Trial 3205 pruned.
[I 2021-05-10 19:34:25,150] Trial 3206 pruned.
[I 2021-05-10 19:34:25,865] Trial 3207 pruned.
[I 2021-05-10 19:34:26,292] Trial 3208 pruned.
[I 2021-05-10 19:35:28,116] Trial 3209 finished with value: 179.3822021484375 and parameters: {'lr': 0.0016780530518636094, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 1024, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:35:28,305] Trial 3210 pruned.
[I 2021-05-10 19:35:28,941] Trial 3211 pruned.
[I 2021-05-10 19:35:29,380] Trial 3212 pruned.
[I 2021-05-10 19:35:30,141] Trial 3213 pruned.
[I 2021-05-10 19:35:30,564] Trial 3214 pruned.
[I 2021-05-10 19:35:31,299] Trial 3215 pruned.
[I 2021-05-10 19:35:31,705] Trial 3216 pruned.
[I 2021-05-10 19:35:32,410] Trial 3217 pruned.
[I 2021-05-10 19:35:33,186] Trial 3218 pruned.
[I 2021-05-10 19:35:33,628] Trial 3219 pruned.
[I 2021-05-10 19:35:34,213] Trial 3220 pruned.
[I 2021-05-10 19:35:34,581] Trial 3221 pruned.
[I 2021-05-10 19:35:35,287] Trial 3222 pruned.
[I 2021-05-10 19:35:35,703] Trial 3223 pruned.
[I 2021-05-10 19:35:36,405] Trial 3224 pruned.
[I 2021-05-10 19:35:36,596] Trial 3225 pruned.
[I 2021-05-10 19:35:37,223] Trial 3226 pruned.
[I 2021-05-10 19:35:37,488] Trial 3227 pruned.
[I 2021-05-10 19:35:37,780] Trial 3228 pruned.
[I 2021-05-10 19:35:38,505] Trial 3229 pruned.
[I 2021-05-10 19:35:38,935] Trial 3230 pruned.
[I 2021-05-10 19:35:39,660] Trial 3231 pruned.
[I 2021-05-10 19:35:40,309] Trial 3232 pruned.
[I 2021-05-10 19:35:40,712] Trial 3233 pruned.
[I 2021-05-10 19:35:41,432] Trial 3234 pruned.
[I 2021-05-10 19:35:41,844] Trial 3235 pruned.
[I 2021-05-10 19:35:42,540] Trial 3236 pruned.
[I 2021-05-10 19:35:42,954] Trial 3237 pruned.
[I 2021-05-10 19:35:43,645] Trial 3238 pruned.
[I 2021-05-10 19:35:43,836] Trial 3239 pruned.
[I 2021-05-10 19:35:44,548] Trial 3240 pruned.
[I 2021-05-10 19:35:44,955] Trial 3241 pruned.
[I 2021-05-10 19:35:45,689] Trial 3242 pruned.
[I 2021-05-10 19:35:46,062] Trial 3243 pruned.
[I 2021-05-10 19:35:46,768] Trial 3244 pruned.
[I 2021-05-10 19:35:47,169] Trial 3245 pruned.
[I 2021-05-10 19:36:49,227] Trial 3246 finished with value: 165.91976928710938 and parameters: {'lr': 0.002590280560322404, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 986, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'relu'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:36:49,943] Trial 3247 pruned.
[I 2021-05-10 19:36:50,328] Trial 3248 pruned.
[I 2021-05-10 19:36:51,029] Trial 3249 pruned.
[I 2021-05-10 19:36:51,445] Trial 3250 pruned.
[I 2021-05-10 19:36:52,171] Trial 3251 pruned.
[I 2021-05-10 19:36:52,617] Trial 3252 pruned.
[I 2021-05-10 19:36:53,355] Trial 3253 pruned.
[I 2021-05-10 19:36:53,624] Trial 3254 pruned.
[I 2021-05-10 19:36:53,796] Trial 3255 pruned.
[I 2021-05-10 19:36:54,535] Trial 3256 pruned.
[I 2021-05-10 19:36:54,999] Trial 3257 pruned.
[I 2021-05-10 19:36:55,731] Trial 3258 pruned.
[I 2021-05-10 19:36:56,160] Trial 3259 pruned.
[I 2021-05-10 19:36:56,869] Trial 3260 pruned.
[I 2021-05-10 19:36:57,586] Trial 3261 pruned.
[I 2021-05-10 19:36:57,999] Trial 3262 pruned.
[I 2021-05-10 19:36:58,733] Trial 3263 pruned.
[I 2021-05-10 19:36:59,163] Trial 3264 pruned.
[I 2021-05-10 19:36:59,816] Trial 3265 pruned.
[I 2021-05-10 19:37:00,229] Trial 3266 pruned.
[I 2021-05-10 19:37:00,976] Trial 3267 pruned.
[I 2021-05-10 19:37:01,173] Trial 3268 pruned.
[I 2021-05-10 19:37:01,909] Trial 3269 pruned.
[I 2021-05-10 19:37:02,321] Trial 3270 pruned.
[I 2021-05-10 19:37:02,964] Trial 3271 pruned.
[I 2021-05-10 19:37:03,312] Trial 3272 pruned.
[I 2021-05-10 19:37:04,042] Trial 3273 pruned.
[I 2021-05-10 19:37:04,472] Trial 3274 pruned.
[I 2021-05-10 19:37:05,174] Trial 3275 pruned.
[I 2021-05-10 19:37:05,886] Trial 3276 pruned.
[I 2021-05-10 19:37:06,308] Trial 3277 pruned.
[I 2021-05-10 19:37:07,028] Trial 3278 pruned.
[I 2021-05-10 19:37:07,442] Trial 3279 pruned.
[I 2021-05-10 19:37:08,109] Trial 3280 pruned.
[I 2021-05-10 19:37:08,534] Trial 3281 pruned.
[I 2021-05-10 19:37:09,248] Trial 3282 pruned.
[I 2021-05-10 19:37:09,443] Trial 3283 pruned.
[I 2021-05-10 19:37:10,171] Trial 3284 pruned.
[I 2021-05-10 19:37:10,440] Trial 3285 pruned.
[I 2021-05-10 19:37:10,825] Trial 3286 pruned.
[I 2021-05-10 19:37:11,547] Trial 3287 pruned.
[I 2021-05-10 19:37:11,961] Trial 3288 pruned.
[I 2021-05-10 19:37:12,628] Trial 3289 pruned.
[I 2021-05-10 19:37:13,349] Trial 3290 pruned.
[I 2021-05-10 19:37:13,767] Trial 3291 pruned.
[I 2021-05-10 19:37:14,470] Trial 3292 pruned.
[I 2021-05-10 19:37:14,892] Trial 3293 pruned.
[I 2021-05-10 19:37:15,603] Trial 3294 pruned.
[I 2021-05-10 19:37:16,031] Trial 3295 pruned.
[I 2021-05-10 19:37:16,769] Trial 3296 pruned.
[I 2021-05-10 19:37:17,492] Trial 3297 pruned.
[I 2021-05-10 19:37:17,684] Trial 3298 pruned.
[I 2021-05-10 19:37:18,105] Trial 3299 pruned.
[I 2021-05-10 19:37:18,740] Trial 3300 pruned.
[I 2021-05-10 19:37:19,165] Trial 3301 pruned.
[I 2021-05-10 19:37:19,902] Trial 3302 pruned.
[I 2021-05-10 19:37:20,326] Trial 3303 pruned.
[I 2021-05-10 19:37:21,056] Trial 3304 pruned.
[I 2021-05-10 19:37:21,705] Trial 3305 pruned.
[I 2021-05-10 19:37:22,731] Trial 3306 pruned.
[I 2021-05-10 19:37:23,453] Trial 3307 pruned.
[I 2021-05-10 19:37:23,868] Trial 3308 pruned.
[I 2021-05-10 19:37:24,579] Trial 3309 pruned.
[I 2021-05-10 19:37:25,003] Trial 3310 pruned.
[I 2021-05-10 19:37:25,722] Trial 3311 pruned.
[I 2021-05-10 19:37:25,988] Trial 3312 pruned.
[I 2021-05-10 19:37:26,184] Trial 3313 pruned.
[I 2021-05-10 19:37:26,853] Trial 3314 pruned.
[I 2021-05-10 19:37:27,269] Trial 3315 pruned.
[I 2021-05-10 19:37:29,187] Trial 3316 pruned.
[I 2021-05-10 19:37:29,603] Trial 3317 pruned.
[I 2021-05-10 19:37:30,335] Trial 3318 pruned.
[I 2021-05-10 19:37:31,044] Trial 3319 pruned.
[I 2021-05-10 19:37:31,463] Trial 3320 pruned.
[I 2021-05-10 19:37:32,130] Trial 3321 pruned.
[I 2021-05-10 19:37:32,556] Trial 3322 pruned.
[I 2021-05-10 19:37:33,097] Trial 3323 pruned.
[I 2021-05-10 19:37:33,842] Trial 3324 pruned.
[I 2021-05-10 19:37:34,566] Trial 3325 pruned.
[I 2021-05-10 19:37:35,275] Trial 3326 pruned.
[I 2021-05-10 19:37:35,471] Trial 3327 pruned.
[I 2021-05-10 19:37:35,903] Trial 3328 pruned.
[I 2021-05-10 19:37:36,572] Trial 3329 pruned.
[I 2021-05-10 19:37:36,992] Trial 3330 pruned.
[I 2021-05-10 19:37:37,715] Trial 3331 pruned.
[I 2021-05-10 19:37:38,134] Trial 3332 pruned.
[I 2021-05-10 19:37:38,847] Trial 3333 pruned.
[I 2021-05-10 19:37:39,580] Trial 3334 pruned.
[I 2021-05-10 19:37:40,007] Trial 3335 pruned.
[I 2021-05-10 19:37:40,719] Trial 3336 pruned.
[I 2021-05-10 19:37:41,085] Trial 3337 pruned.
[I 2021-05-10 19:37:41,810] Trial 3338 pruned.
[I 2021-05-10 19:37:42,244] Trial 3339 pruned.
[I 2021-05-10 19:37:43,001] Trial 3340 pruned.
[I 2021-05-10 19:37:43,215] Trial 3341 pruned.
[I 2021-05-10 19:37:43,500] Trial 3342 pruned.
[I 2021-05-10 19:37:44,222] Trial 3343 pruned.
[I 2021-05-10 19:37:44,573] Trial 3344 pruned.
[I 2021-05-10 19:37:45,264] Trial 3345 pruned.
[I 2021-05-10 19:37:45,671] Trial 3346 pruned.
[I 2021-05-10 19:37:46,370] Trial 3347 pruned.
[I 2021-05-10 19:37:47,073] Trial 3348 pruned.
[I 2021-05-10 19:37:47,480] Trial 3349 pruned.
[I 2021-05-10 19:37:48,165] Trial 3350 pruned.
[I 2021-05-10 19:37:48,560] Trial 3351 pruned.
[I 2021-05-10 19:37:49,185] Trial 3352 pruned.
[I 2021-05-10 19:37:49,595] Trial 3353 pruned.
[I 2021-05-10 19:37:50,277] Trial 3354 pruned.
[I 2021-05-10 19:37:50,984] Trial 3355 pruned.
[I 2021-05-10 19:37:51,174] Trial 3356 pruned.
[I 2021-05-10 19:37:51,567] Trial 3357 pruned.
[I 2021-05-10 19:37:52,258] Trial 3358 pruned.
[I 2021-05-10 19:37:52,510] Trial 3359 pruned.
[I 2021-05-10 19:37:53,041] Trial 3360 pruned.
[I 2021-05-10 19:37:53,396] Trial 3361 pruned.
[I 2021-05-10 19:37:54,080] Trial 3362 pruned.
[I 2021-05-10 19:37:55,954] Trial 3363 pruned.
[I 2021-05-10 19:37:56,349] Trial 3364 pruned.
[I 2021-05-10 19:37:57,025] Trial 3365 pruned.
[I 2021-05-10 19:37:57,425] Trial 3366 pruned.
[I 2021-05-10 19:37:58,061] Trial 3367 pruned.
[I 2021-05-10 19:37:58,450] Trial 3368 pruned.
[I 2021-05-10 19:37:59,149] Trial 3369 pruned.
[I 2021-05-10 19:37:59,329] Trial 3370 pruned.
[I 2021-05-10 19:37:59,586] Trial 3371 pruned.
[I 2021-05-10 19:38:00,276] Trial 3372 pruned.
[I 2021-05-10 19:38:00,673] Trial 3373 pruned.
[I 2021-05-10 19:38:01,378] Trial 3374 pruned.
[I 2021-05-10 19:38:01,728] Trial 3375 pruned.
[I 2021-05-10 19:38:02,420] Trial 3376 pruned.
[I 2021-05-10 19:38:03,111] Trial 3377 pruned.
[I 2021-05-10 19:38:03,511] Trial 3378 pruned.
[I 2021-05-10 19:38:04,186] Trial 3379 pruned.
[I 2021-05-10 19:38:04,591] Trial 3380 pruned.
[I 2021-05-10 19:38:05,191] Trial 3381 pruned.
[I 2021-05-10 19:38:05,591] Trial 3382 pruned.
[I 2021-05-10 19:38:06,286] Trial 3383 pruned.
[I 2021-05-10 19:38:06,976] Trial 3384 pruned.
[I 2021-05-10 19:38:07,162] Trial 3385 pruned.
[I 2021-05-10 19:38:07,559] Trial 3386 pruned.
[I 2021-05-10 19:38:08,168] Trial 3387 pruned.
[I 2021-05-10 19:38:08,566] Trial 3388 pruned.
[I 2021-05-10 19:38:09,269] Trial 3389 pruned.
[I 2021-05-10 19:38:09,676] Trial 3390 pruned.
[I 2021-05-10 19:38:10,355] Trial 3391 pruned.
[I 2021-05-10 19:38:12,194] Trial 3392 pruned.
[I 2021-05-10 19:38:12,595] Trial 3393 pruned.
[I 2021-05-10 19:38:13,294] Trial 3394 pruned.
[I 2021-05-10 19:38:13,644] Trial 3395 pruned.
[I 2021-05-10 19:38:14,326] Trial 3396 pruned.
[I 2021-05-10 19:38:14,719] Trial 3397 pruned.
[I 2021-05-10 19:38:15,423] Trial 3398 pruned.
[I 2021-05-10 19:38:15,683] Trial 3399 pruned.
[I 2021-05-10 19:38:15,864] Trial 3400 pruned.
[I 2021-05-10 19:38:16,544] Trial 3401 pruned.
[I 2021-05-10 19:38:16,899] Trial 3402 pruned.
[I 2021-05-10 19:38:17,591] Trial 3403 pruned.
[I 2021-05-10 19:38:17,992] Trial 3404 pruned.
[I 2021-05-10 19:38:18,682] Trial 3405 pruned.
[I 2021-05-10 19:38:19,361] Trial 3406 pruned.
[I 2021-05-10 19:38:19,763] Trial 3407 pruned.
[I 2021-05-10 19:38:20,462] Trial 3408 pruned.
[I 2021-05-10 19:38:20,861] Trial 3409 pruned.
[I 2021-05-10 19:38:21,470] Trial 3410 pruned.
[I 2021-05-10 19:38:21,876] Trial 3411 pruned.
[I 2021-05-10 19:38:22,561] Trial 3412 pruned.
[I 2021-05-10 19:38:23,234] Trial 3413 pruned.
[I 2021-05-10 19:38:23,631] Trial 3414 pruned.
[I 2021-05-10 19:38:23,811] Trial 3415 pruned.
[I 2021-05-10 19:38:24,502] Trial 3416 pruned.
[I 2021-05-10 19:38:24,861] Trial 3417 pruned.
[I 2021-05-10 19:38:25,553] Trial 3418 pruned.
[I 2021-05-10 19:38:25,950] Trial 3419 pruned.
[I 2021-05-10 19:38:26,521] Trial 3420 pruned.
[I 2021-05-10 19:38:27,213] Trial 3421 pruned.
[I 2021-05-10 19:38:27,579] Trial 3422 pruned.
[I 2021-05-10 19:38:29,433] Trial 3423 pruned.
[I 2021-05-10 19:38:29,828] Trial 3424 pruned.
[I 2021-05-10 19:38:30,519] Trial 3425 pruned.
[I 2021-05-10 19:38:30,918] Trial 3426 pruned.
[I 2021-05-10 19:38:31,597] Trial 3427 pruned.
[I 2021-05-10 19:38:31,787] Trial 3428 pruned.
[I 2021-05-10 19:38:32,045] Trial 3429 pruned.
[I 2021-05-10 19:38:32,738] Trial 3430 pruned.
[I 2021-05-10 19:38:33,134] Trial 3431 pruned.
[I 2021-05-10 19:38:33,766] Trial 3432 pruned.
[I 2021-05-10 19:38:34,165] Trial 3433 pruned.
[I 2021-05-10 19:38:34,860] Trial 3434 pruned.
[I 2021-05-10 19:38:35,558] Trial 3435 pruned.
[I 2021-05-10 19:38:35,959] Trial 3436 pruned.
[I 2021-05-10 19:38:36,641] Trial 3437 pruned.
[I 2021-05-10 19:38:37,038] Trial 3438 pruned.
[I 2021-05-10 19:38:37,721] Trial 3439 pruned.
[I 2021-05-10 19:38:38,077] Trial 3440 pruned.
[I 2021-05-10 19:38:38,765] Trial 3441 pruned.
[I 2021-05-10 19:38:39,457] Trial 3442 pruned.
[I 2021-05-10 19:38:39,854] Trial 3443 pruned.
[I 2021-05-10 19:38:40,035] Trial 3444 pruned.
[I 2021-05-10 19:38:40,720] Trial 3445 pruned.
[I 2021-05-10 19:38:41,118] Trial 3446 pruned.
[I 2021-05-10 19:38:41,731] Trial 3447 pruned.
[I 2021-05-10 19:38:42,135] Trial 3448 pruned.
[I 2021-05-10 19:38:42,825] Trial 3449 pruned.
[I 2021-05-10 19:38:43,509] Trial 3450 pruned.
[I 2021-05-10 19:38:43,911] Trial 3451 pruned.
[I 2021-05-10 19:38:44,589] Trial 3452 pruned.
[I 2021-05-10 19:38:44,992] Trial 3453 pruned.
[I 2021-05-10 19:38:45,604] Trial 3454 pruned.
[I 2021-05-10 19:38:46,000] Trial 3455 pruned.
[I 2021-05-10 19:38:46,519] Trial 3456 pruned.
[I 2021-05-10 19:38:46,709] Trial 3457 pruned.
[I 2021-05-10 19:38:46,971] Trial 3458 pruned.
[I 2021-05-10 19:38:47,648] Trial 3459 pruned.
[I 2021-05-10 19:38:48,014] Trial 3460 pruned.
[I 2021-05-10 19:38:48,705] Trial 3461 pruned.
[I 2021-05-10 19:38:49,109] Trial 3462 pruned.
[I 2021-05-10 19:38:49,799] Trial 3463 pruned.
[I 2021-05-10 19:38:50,484] Trial 3464 pruned.
[I 2021-05-10 19:38:50,882] Trial 3465 pruned.
[I 2021-05-10 19:38:51,576] Trial 3466 pruned.
[I 2021-05-10 19:38:51,979] Trial 3467 pruned.
[I 2021-05-10 19:38:52,683] Trial 3468 pruned.
[I 2021-05-10 19:38:53,081] Trial 3469 pruned.
[I 2021-05-10 19:38:53,695] Trial 3470 pruned.
[I 2021-05-10 19:39:55,842] Trial 3471 finished with value: 162.7103729248047 and parameters: {'lr': 0.0016832990045347745, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 978, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:39:56,252] Trial 3472 pruned.
[I 2021-05-10 19:39:56,442] Trial 3473 pruned.
[I 2021-05-10 19:39:57,125] Trial 3474 pruned.
[I 2021-05-10 19:39:57,486] Trial 3475 pruned.
[I 2021-05-10 19:39:58,189] Trial 3476 pruned.
[I 2021-05-10 19:39:58,584] Trial 3477 pruned.
[I 2021-05-10 19:39:59,288] Trial 3478 pruned.
[I 2021-05-10 19:39:59,994] Trial 3479 pruned.
[I 2021-05-10 19:40:00,427] Trial 3480 pruned.
[I 2021-05-10 19:40:01,156] Trial 3481 pruned.
[I 2021-05-10 19:40:01,574] Trial 3482 pruned.
[I 2021-05-10 19:40:02,206] Trial 3483 pruned.
[I 2021-05-10 19:40:02,639] Trial 3484 pruned.
[I 2021-05-10 19:40:03,388] Trial 3485 pruned.
[I 2021-05-10 19:40:03,590] Trial 3486 pruned.
[I 2021-05-10 19:40:03,859] Trial 3487 pruned.
[I 2021-05-10 19:40:04,608] Trial 3488 pruned.
[I 2021-05-10 19:40:04,932] Trial 3489 pruned.
[I 2021-05-10 19:40:05,558] Trial 3490 pruned.
[I 2021-05-10 19:40:05,991] Trial 3491 pruned.
[I 2021-05-10 19:40:06,720] Trial 3492 pruned.
[I 2021-05-10 19:40:07,441] Trial 3493 pruned.
[I 2021-05-10 19:40:07,875] Trial 3494 pruned.
[I 2021-05-10 19:40:08,598] Trial 3495 pruned.
[I 2021-05-10 19:40:09,026] Trial 3496 pruned.
[I 2021-05-10 19:40:09,741] Trial 3497 pruned.
[I 2021-05-10 19:40:10,171] Trial 3498 pruned.
[I 2021-05-10 19:40:10,826] Trial 3499 pruned.
[I 2021-05-10 19:40:11,563] Trial 3500 pruned.
[I 2021-05-10 19:40:11,981] Trial 3501 pruned.
[I 2021-05-10 19:40:12,189] Trial 3502 pruned.
[I 2021-05-10 19:40:12,922] Trial 3503 pruned.
[I 2021-05-10 19:40:13,352] Trial 3504 pruned.
[I 2021-05-10 19:40:14,007] Trial 3505 pruned.
[I 2021-05-10 19:40:14,427] Trial 3506 pruned.
[I 2021-05-10 19:40:14,893] Trial 3507 pruned.
[I 2021-05-10 19:40:15,616] Trial 3508 pruned.
[I 2021-05-10 19:40:16,036] Trial 3509 pruned.
[I 2021-05-10 19:40:16,770] Trial 3510 pruned.
[I 2021-05-10 19:40:17,204] Trial 3511 pruned.
[I 2021-05-10 19:40:17,849] Trial 3512 pruned.
[I 2021-05-10 19:40:18,281] Trial 3513 pruned.
[I 2021-05-10 19:40:19,025] Trial 3514 pruned.
[I 2021-05-10 19:40:19,221] Trial 3515 pruned.
[I 2021-05-10 19:40:19,949] Trial 3516 pruned.
[I 2021-05-10 19:40:20,349] Trial 3517 pruned.
[I 2021-05-10 19:40:20,976] Trial 3518 pruned.
[I 2021-05-10 19:40:21,238] Trial 3519 pruned.
[I 2021-05-10 19:40:24,895] Trial 3520 pruned.
[I 2021-05-10 19:40:25,620] Trial 3521 pruned.
[I 2021-05-10 19:40:26,364] Trial 3522 pruned.
[I 2021-05-10 19:40:26,793] Trial 3523 pruned.
[I 2021-05-10 19:40:27,530] Trial 3524 pruned.
[I 2021-05-10 19:40:27,912] Trial 3525 pruned.
[I 2021-05-10 19:40:28,633] Trial 3526 pruned.
[I 2021-05-10 19:40:29,052] Trial 3527 pruned.
[I 2021-05-10 19:40:31,056] Trial 3528 pruned.
[I 2021-05-10 19:40:31,794] Trial 3529 pruned.
[I 2021-05-10 19:40:32,220] Trial 3530 pruned.
[I 2021-05-10 19:40:32,948] Trial 3531 pruned.
[I 2021-05-10 19:40:33,383] Trial 3532 pruned.
[I 2021-05-10 19:40:34,130] Trial 3533 pruned.
[I 2021-05-10 19:40:34,337] Trial 3534 pruned.
[I 2021-05-10 19:40:34,707] Trial 3535 pruned.
[I 2021-05-10 19:40:35,426] Trial 3536 pruned.
[I 2021-05-10 19:40:36,154] Trial 3537 pruned.
[I 2021-05-10 19:40:36,509] Trial 3538 pruned.
[I 2021-05-10 19:40:37,230] Trial 3539 pruned.
[I 2021-05-10 19:40:37,649] Trial 3540 pruned.
[I 2021-05-10 19:40:38,350] Trial 3541 pruned.
[I 2021-05-10 19:40:38,721] Trial 3542 pruned.
[I 2021-05-10 19:40:39,415] Trial 3543 pruned.
[I 2021-05-10 19:40:39,608] Trial 3544 pruned.
[I 2021-05-10 19:40:40,250] Trial 3545 pruned.
[I 2021-05-10 19:40:40,664] Trial 3546 pruned.
[I 2021-05-10 19:40:41,360] Trial 3547 pruned.
[I 2021-05-10 19:40:41,624] Trial 3548 pruned.
[I 2021-05-10 19:40:42,022] Trial 3549 pruned.
[I 2021-05-10 19:40:42,735] Trial 3550 pruned.
[I 2021-05-10 19:40:43,421] Trial 3551 pruned.
[I 2021-05-10 19:40:43,824] Trial 3552 pruned.
[I 2021-05-10 19:40:44,532] Trial 3553 pruned.
[I 2021-05-10 19:40:44,903] Trial 3554 pruned.
[I 2021-05-10 19:40:45,599] Trial 3555 pruned.
[I 2021-05-10 19:40:46,004] Trial 3556 pruned.
[I 2021-05-10 19:40:46,709] Trial 3557 pruned.
[I 2021-05-10 19:40:47,400] Trial 3558 pruned.
[I 2021-05-10 19:40:47,801] Trial 3559 pruned.
[I 2021-05-10 19:40:47,995] Trial 3560 pruned.
[I 2021-05-10 19:40:48,688] Trial 3561 pruned.
[I 2021-05-10 19:40:49,085] Trial 3562 pruned.
[I 2021-05-10 19:40:49,780] Trial 3563 pruned.
[I 2021-05-10 19:40:50,188] Trial 3564 pruned.
[I 2021-05-10 19:40:50,800] Trial 3565 pruned.
[I 2021-05-10 19:40:51,482] Trial 3566 pruned.
[I 2021-05-10 19:40:51,888] Trial 3567 pruned.
[I 2021-05-10 19:40:52,604] Trial 3568 pruned.
[I 2021-05-10 19:40:53,025] Trial 3569 pruned.
[I 2021-05-10 19:40:53,766] Trial 3570 pruned.
[I 2021-05-10 19:40:54,152] Trial 3571 pruned.
[I 2021-05-10 19:40:54,904] Trial 3572 pruned.
[I 2021-05-10 19:40:55,107] Trial 3573 pruned.
[I 2021-05-10 19:40:55,856] Trial 3574 pruned.
[I 2021-05-10 19:40:56,136] Trial 3575 pruned.
[I 2021-05-10 19:40:56,568] Trial 3576 pruned.
[I 2021-05-10 19:40:57,306] Trial 3577 pruned.
[I 2021-05-10 19:40:57,726] Trial 3578 pruned.
[I 2021-05-10 19:40:58,371] Trial 3579 pruned.
[I 2021-05-10 19:40:59,110] Trial 3580 pruned.
[I 2021-05-10 19:40:59,549] Trial 3581 pruned.
[I 2021-05-10 19:41:00,276] Trial 3582 pruned.
[I 2021-05-10 19:41:00,722] Trial 3583 pruned.
[I 2021-05-10 19:41:01,423] Trial 3584 pruned.
[I 2021-05-10 19:41:01,858] Trial 3585 pruned.
[I 2021-05-10 19:41:02,590] Trial 3586 pruned.
[I 2021-05-10 19:41:03,211] Trial 3587 pruned.
[I 2021-05-10 19:41:03,634] Trial 3588 pruned.
[I 2021-05-10 19:41:03,840] Trial 3589 pruned.
[I 2021-05-10 19:41:04,590] Trial 3590 pruned.
[I 2021-05-10 19:41:05,024] Trial 3591 pruned.
[I 2021-05-10 19:41:05,745] Trial 3592 pruned.
[I 2021-05-10 19:41:06,140] Trial 3593 pruned.
[I 2021-05-10 19:41:06,892] Trial 3594 pruned.
[I 2021-05-10 19:41:07,612] Trial 3595 pruned.
[I 2021-05-10 19:41:08,045] Trial 3596 pruned.
[I 2021-05-10 19:41:08,776] Trial 3597 pruned.
[I 2021-05-10 19:41:09,199] Trial 3598 pruned.
[I 2021-05-10 19:41:09,946] Trial 3599 pruned.
[I 2021-05-10 19:41:10,382] Trial 3600 pruned.
[I 2021-05-10 19:41:11,032] Trial 3601 pruned.
[I 2021-05-10 19:41:12,409] Trial 3602 pruned.
[I 2021-05-10 19:41:12,614] Trial 3603 pruned.
[I 2021-05-10 19:41:12,893] Trial 3604 pruned.
[I 2021-05-10 19:41:13,313] Trial 3605 pruned.
[I 2021-05-10 19:41:14,055] Trial 3606 pruned.
[I 2021-05-10 19:41:14,430] Trial 3607 pruned.
[I 2021-05-10 19:41:15,151] Trial 3608 pruned.
[I 2021-05-10 19:41:15,891] Trial 3609 pruned.
[I 2021-05-10 19:41:16,321] Trial 3610 pruned.
[I 2021-05-10 19:41:17,056] Trial 3611 pruned.
[I 2021-05-10 19:41:17,739] Trial 3612 pruned.
[I 2021-05-10 19:41:18,462] Trial 3613 pruned.
[I 2021-05-10 19:41:18,897] Trial 3614 pruned.
[I 2021-05-10 19:41:19,612] Trial 3615 pruned.
[I 2021-05-10 19:41:20,345] Trial 3616 pruned.
[I 2021-05-10 19:41:20,773] Trial 3617 pruned.
[I 2021-05-10 19:41:21,589] Trial 3618 pruned.
[I 2021-05-10 19:41:21,794] Trial 3619 pruned.
[I 2021-05-10 19:41:22,242] Trial 3620 pruned.
[I 2021-05-10 19:41:23,034] Trial 3621 pruned.
[I 2021-05-10 19:41:23,485] Trial 3622 pruned.
[I 2021-05-10 19:41:24,139] Trial 3623 pruned.
[I 2021-05-10 19:41:24,766] Trial 3624 pruned.
[I 2021-05-10 19:41:25,206] Trial 3625 pruned.
[I 2021-05-10 19:41:25,949] Trial 3626 pruned.
[I 2021-05-10 19:41:26,385] Trial 3627 pruned.
[I 2021-05-10 19:42:20,644] Trial 3628 finished with value: 180.75836181640625 and parameters: {'lr': 0.0028183420303472717, 'batch_size': 16, 'n_layers': 4, 'neurons_HL1': 928, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:42:21,052] Trial 3629 pruned.
[I 2021-05-10 19:42:21,733] Trial 3630 pruned.
[I 2021-05-10 19:42:21,924] Trial 3631 pruned.
[I 2021-05-10 19:42:22,615] Trial 3632 pruned.
[I 2021-05-10 19:42:22,879] Trial 3633 pruned.
[I 2021-05-10 19:42:23,287] Trial 3634 pruned.
[I 2021-05-10 19:42:26,289] Trial 3635 pruned.
[I 2021-05-10 19:42:26,689] Trial 3636 pruned.
[I 2021-05-10 19:42:27,300] Trial 3637 pruned.
[I 2021-05-10 19:42:28,003] Trial 3638 pruned.
[I 2021-05-10 19:42:28,410] Trial 3639 pruned.
[I 2021-05-10 19:42:29,111] Trial 3640 pruned.
[I 2021-05-10 19:42:29,485] Trial 3641 pruned.
[I 2021-05-10 19:42:30,178] Trial 3642 pruned.
[I 2021-05-10 19:42:30,574] Trial 3643 pruned.
[I 2021-05-10 19:42:31,274] Trial 3644 pruned.
[I 2021-05-10 19:42:31,952] Trial 3645 pruned.
[I 2021-05-10 19:42:32,347] Trial 3646 pruned.
[I 2021-05-10 19:42:32,541] Trial 3647 pruned.
[I 2021-05-10 19:42:33,222] Trial 3648 pruned.
[I 2021-05-10 19:42:33,624] Trial 3649 pruned.
[I 2021-05-10 19:42:34,331] Trial 3650 pruned.
[I 2021-05-10 19:42:34,734] Trial 3651 pruned.
[I 2021-05-10 19:42:35,354] Trial 3652 pruned.
[I 2021-05-10 19:42:36,046] Trial 3653 pruned.
[I 2021-05-10 19:42:36,459] Trial 3654 pruned.
[I 2021-05-10 19:42:37,147] Trial 3655 pruned.
[I 2021-05-10 19:42:37,552] Trial 3656 pruned.
[I 2021-05-10 19:42:38,241] Trial 3657 pruned.
[I 2021-05-10 19:42:38,647] Trial 3658 pruned.
[I 2021-05-10 19:42:39,273] Trial 3659 pruned.
[I 2021-05-10 19:42:39,960] Trial 3660 pruned.
[I 2021-05-10 19:42:40,123] Trial 3661 pruned.
[I 2021-05-10 19:42:40,385] Trial 3662 pruned.
[I 2021-05-10 19:42:40,792] Trial 3663 pruned.
[I 2021-05-10 19:42:41,488] Trial 3664 pruned.
[I 2021-05-10 19:42:41,898] Trial 3665 pruned.
[I 2021-05-10 19:42:42,595] Trial 3666 pruned.
[I 2021-05-10 19:42:43,281] Trial 3667 pruned.
[I 2021-05-10 19:42:43,645] Trial 3668 pruned.
[I 2021-05-10 19:42:44,351] Trial 3669 pruned.
[I 2021-05-10 19:42:44,755] Trial 3670 pruned.
[I 2021-05-10 19:42:45,459] Trial 3671 pruned.
[I 2021-05-10 19:42:45,856] Trial 3672 pruned.
[I 2021-05-10 19:42:46,552] Trial 3673 pruned.
[I 2021-05-10 19:42:47,188] Trial 3674 pruned.
[I 2021-05-10 19:42:47,601] Trial 3675 pruned.
[I 2021-05-10 19:42:47,791] Trial 3676 pruned.
[I 2021-05-10 19:42:48,478] Trial 3677 pruned.
[I 2021-05-10 19:42:48,887] Trial 3678 pruned.
[I 2021-05-10 19:42:49,509] Trial 3679 pruned.
[I 2021-05-10 19:42:49,917] Trial 3680 pruned.
[I 2021-05-10 19:42:50,621] Trial 3681 pruned.
[I 2021-05-10 19:42:51,317] Trial 3682 pruned.
[I 2021-05-10 19:42:51,716] Trial 3683 pruned.
[I 2021-05-10 19:42:52,417] Trial 3684 pruned.
[I 2021-05-10 19:42:52,839] Trial 3685 pruned.
[I 2021-05-10 19:42:53,529] Trial 3686 pruned.
[I 2021-05-10 19:42:53,896] Trial 3687 pruned.
[I 2021-05-10 19:42:55,818] Trial 3688 pruned.
[I 2021-05-10 19:42:55,951] Trial 3689 pruned.
[I 2021-05-10 19:42:56,636] Trial 3690 pruned.
[I 2021-05-10 19:42:56,900] Trial 3691 pruned.
[I 2021-05-10 19:42:57,311] Trial 3692 pruned.
[I 2021-05-10 19:42:58,015] Trial 3693 pruned.
[I 2021-05-10 19:42:58,369] Trial 3694 pruned.
[I 2021-05-10 19:42:59,080] Trial 3695 pruned.
[I 2021-05-10 19:42:59,792] Trial 3696 pruned.
[I 2021-05-10 19:43:00,206] Trial 3697 pruned.
[I 2021-05-10 19:43:00,925] Trial 3698 pruned.
[I 2021-05-10 19:43:01,371] Trial 3699 pruned.
[I 2021-05-10 19:43:02,127] Trial 3700 pruned.
[I 2021-05-10 19:43:02,519] Trial 3701 pruned.
[I 2021-05-10 19:43:03,244] Trial 3702 pruned.
[I 2021-05-10 19:43:03,973] Trial 3703 pruned.
[I 2021-05-10 19:43:04,406] Trial 3704 pruned.
[I 2021-05-10 19:43:04,612] Trial 3705 pruned.
[I 2021-05-10 19:43:05,341] Trial 3706 pruned.
[I 2021-05-10 19:43:05,767] Trial 3707 pruned.
[I 2021-05-10 19:43:06,414] Trial 3708 pruned.
[I 2021-05-10 19:43:06,853] Trial 3709 pruned.
[I 2021-05-10 19:43:07,606] Trial 3710 pruned.
[I 2021-05-10 19:43:08,323] Trial 3711 pruned.
[I 2021-05-10 19:43:08,758] Trial 3712 pruned.
[I 2021-05-10 19:43:09,497] Trial 3713 pruned.
[I 2021-05-10 19:43:09,925] Trial 3714 pruned.
[I 2021-05-10 19:43:10,685] Trial 3715 pruned.
[I 2021-05-10 19:43:11,123] Trial 3716 pruned.
[I 2021-05-10 19:43:11,871] Trial 3717 pruned.
[I 2021-05-10 19:43:12,057] Trial 3718 pruned.
[I 2021-05-10 19:43:12,782] Trial 3719 pruned.
[I 2021-05-10 19:43:13,070] Trial 3720 pruned.
[I 2021-05-10 19:43:13,451] Trial 3721 pruned.
[I 2021-05-10 19:43:14,205] Trial 3722 pruned.
[I 2021-05-10 19:43:14,642] Trial 3723 pruned.
[I 2021-05-10 19:43:23,587] Trial 3724 pruned.
[I 2021-05-10 19:43:24,305] Trial 3725 pruned.
[I 2021-05-10 19:43:24,750] Trial 3726 pruned.
[I 2021-05-10 19:43:25,499] Trial 3727 pruned.
[I 2021-05-10 19:43:25,929] Trial 3728 pruned.
[I 2021-05-10 19:43:26,682] Trial 3729 pruned.
[I 2021-05-10 19:43:27,121] Trial 3730 pruned.
[I 2021-05-10 19:43:27,695] Trial 3731 pruned.
[I 2021-05-10 19:43:28,351] Trial 3732 pruned.
[I 2021-05-10 19:43:28,790] Trial 3733 pruned.
[I 2021-05-10 19:43:29,524] Trial 3734 pruned.
[I 2021-05-10 19:43:29,734] Trial 3735 pruned.
[I 2021-05-10 19:43:30,163] Trial 3736 pruned.
[I 2021-05-10 19:44:24,843] Trial 3737 finished with value: 159.85655212402344 and parameters: {'lr': 0.0021672887715163953, 'batch_size': 16, 'n_layers': 4, 'neurons_HL1': 1024, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:44:25,549] Trial 3738 pruned.
[I 2021-05-10 19:44:25,957] Trial 3739 pruned.
[I 2021-05-10 19:44:26,777] Trial 3740 pruned.
[I 2021-05-10 19:44:27,475] Trial 3741 pruned.
[I 2021-05-10 19:44:27,859] Trial 3742 pruned.
[I 2021-05-10 19:44:28,562] Trial 3743 pruned.
[I 2021-05-10 19:44:28,976] Trial 3744 pruned.
[I 2021-05-10 19:44:29,670] Trial 3745 pruned.
[I 2021-05-10 19:44:30,081] Trial 3746 pruned.
[I 2021-05-10 19:44:30,781] Trial 3747 pruned.
[I 2021-05-10 19:44:30,965] Trial 3748 pruned.
[I 2021-05-10 19:44:31,660] Trial 3749 pruned.
[I 2021-05-10 19:44:31,927] Trial 3750 pruned.
[I 2021-05-10 19:44:32,348] Trial 3751 pruned.
[I 2021-05-10 19:44:33,055] Trial 3752 pruned.
[I 2021-05-10 19:44:33,416] Trial 3753 pruned.
[I 2021-05-10 19:44:34,133] Trial 3754 pruned.
[I 2021-05-10 19:44:34,830] Trial 3755 pruned.
[I 2021-05-10 19:44:35,242] Trial 3756 pruned.
[I 2021-05-10 19:44:35,942] Trial 3757 pruned.
[I 2021-05-10 19:44:36,352] Trial 3758 pruned.
[I 2021-05-10 19:45:37,681] Trial 3759 finished with value: 163.58164978027344 and parameters: {'lr': 0.00117932630697215, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 1008, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:45:38,124] Trial 3760 pruned.
[I 2021-05-10 19:45:38,880] Trial 3761 pruned.
[I 2021-05-10 19:45:39,499] Trial 3762 pruned.
[I 2021-05-10 19:45:39,939] Trial 3763 pruned.
[I 2021-05-10 19:46:34,368] Trial 3764 finished with value: 160.70330810546875 and parameters: {'lr': 0.0026370109076196413, 'batch_size': 16, 'n_layers': 4, 'neurons_HL1': 1024, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:46:35,005] Trial 3765 pruned.
[I 2021-05-10 19:46:35,706] Trial 3766 pruned.
[I 2021-05-10 19:46:36,405] Trial 3767 pruned.
[I 2021-05-10 19:46:37,097] Trial 3768 pruned.
[I 2021-05-10 19:46:37,801] Trial 3769 pruned.
[I 2021-05-10 19:46:38,490] Trial 3770 pruned.
[I 2021-05-10 19:46:39,201] Trial 3771 pruned.
[I 2021-05-10 19:46:39,902] Trial 3772 pruned.
[I 2021-05-10 19:46:40,599] Trial 3773 pruned.
[I 2021-05-10 19:46:41,208] Trial 3774 pruned.
[I 2021-05-10 19:46:41,905] Trial 3775 pruned.
[I 2021-05-10 19:47:42,721] Trial 3776 finished with value: 163.61773681640625 and parameters: {'lr': 0.002160431319669241, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 1024, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:47:43,464] Trial 3777 pruned.
[I 2021-05-10 19:47:44,713] Trial 3778 pruned.
[I 2021-05-10 19:47:45,466] Trial 3779 pruned.
[I 2021-05-10 19:47:46,219] Trial 3780 pruned.
[I 2021-05-10 19:47:46,959] Trial 3781 pruned.
[I 2021-05-10 19:47:47,704] Trial 3782 pruned.
[I 2021-05-10 19:47:48,450] Trial 3783 pruned.
[I 2021-05-10 19:47:49,193] Trial 3784 pruned.
[I 2021-05-10 19:47:49,934] Trial 3785 pruned.
[I 2021-05-10 19:47:50,675] Trial 3786 pruned.
[I 2021-05-10 19:47:51,390] Trial 3787 pruned.
[I 2021-05-10 19:47:52,072] Trial 3788 pruned.
[I 2021-05-10 19:47:52,792] Trial 3789 pruned.
[I 2021-05-10 19:47:53,539] Trial 3790 pruned.
[I 2021-05-10 19:47:54,283] Trial 3791 pruned.
[I 2021-05-10 19:47:55,025] Trial 3792 pruned.
[I 2021-05-10 19:47:55,786] Trial 3793 pruned.
[I 2021-05-10 19:47:56,463] Trial 3794 pruned.
[I 2021-05-10 19:47:57,198] Trial 3795 pruned.
[I 2021-05-10 19:47:57,930] Trial 3796 pruned.
[I 2021-05-10 19:47:58,674] Trial 3797 pruned.
[I 2021-05-10 19:47:59,461] Trial 3798 pruned.
[I 2021-05-10 19:48:00,262] Trial 3799 pruned.
[I 2021-05-10 19:48:01,021] Trial 3800 pruned.
[I 2021-05-10 19:48:01,771] Trial 3801 pruned.
[I 2021-05-10 19:48:02,448] Trial 3802 pruned.
[I 2021-05-10 19:48:03,197] Trial 3803 pruned.
[I 2021-05-10 19:48:03,940] Trial 3804 pruned.
[I 2021-05-10 19:48:04,690] Trial 3805 pruned.
[I 2021-05-10 19:48:05,442] Trial 3806 pruned.
[I 2021-05-10 19:48:06,197] Trial 3807 pruned.
[I 2021-05-10 19:48:06,709] Trial 3808 pruned.
[I 2021-05-10 19:48:07,462] Trial 3809 pruned.
[I 2021-05-10 19:48:08,082] Trial 3810 pruned.
[I 2021-05-10 19:48:08,763] Trial 3811 pruned.
[I 2021-05-10 19:48:09,510] Trial 3812 pruned.
[I 2021-05-10 19:48:10,251] Trial 3813 pruned.
[I 2021-05-10 19:48:10,967] Trial 3814 pruned.
[I 2021-05-10 19:48:12,212] Trial 3815 pruned.
[I 2021-05-10 19:48:12,954] Trial 3816 pruned.
[I 2021-05-10 19:48:13,698] Trial 3817 pruned.
[I 2021-05-10 19:48:14,452] Trial 3818 pruned.
[I 2021-05-10 19:48:15,201] Trial 3819 pruned.
[I 2021-05-10 19:48:15,919] Trial 3820 pruned.
[I 2021-05-10 19:48:16,676] Trial 3821 pruned.
[I 2021-05-10 19:48:17,418] Trial 3822 pruned.
[I 2021-05-10 19:48:18,136] Trial 3823 pruned.
[I 2021-05-10 19:48:19,382] Trial 3824 pruned.
[I 2021-05-10 19:48:20,098] Trial 3825 pruned.
[I 2021-05-10 19:48:20,845] Trial 3826 pruned.
[I 2021-05-10 19:48:21,605] Trial 3827 pruned.
[I 2021-05-10 19:48:22,326] Trial 3828 pruned.
[I 2021-05-10 19:48:22,536] Trial 3829 pruned.
[I 2021-05-10 19:48:23,281] Trial 3830 pruned.
[I 2021-05-10 19:48:24,023] Trial 3831 pruned.
[I 2021-05-10 19:48:24,695] Trial 3832 pruned.
[I 2021-05-10 19:48:25,444] Trial 3833 pruned.
[I 2021-05-10 19:48:26,190] Trial 3834 pruned.
[I 2021-05-10 19:48:26,925] Trial 3835 pruned.
[I 2021-05-10 19:48:27,680] Trial 3836 pruned.
[I 2021-05-10 19:48:28,432] Trial 3837 pruned.
[I 2021-05-10 19:48:28,641] Trial 3838 pruned.
[I 2021-05-10 19:48:29,327] Trial 3839 pruned.
[I 2021-05-10 19:48:29,613] Trial 3840 pruned.
[I 2021-05-10 19:48:30,354] Trial 3841 pruned.
[I 2021-05-10 19:48:31,072] Trial 3842 pruned.
[I 2021-05-10 19:48:31,756] Trial 3843 pruned.
[I 2021-05-10 19:48:32,496] Trial 3844 pruned.
[I 2021-05-10 19:48:33,252] Trial 3845 pruned.
[I 2021-05-10 19:48:34,009] Trial 3846 pruned.
[I 2021-05-10 19:48:34,758] Trial 3847 pruned.
[I 2021-05-10 19:48:35,508] Trial 3848 pruned.
[I 2021-05-10 19:48:35,720] Trial 3849 pruned.
[I 2021-05-10 19:48:36,458] Trial 3850 pruned.
[I 2021-05-10 19:48:37,203] Trial 3851 pruned.
[I 2021-05-10 19:48:37,886] Trial 3852 pruned.
[I 2021-05-10 19:48:38,609] Trial 3853 pruned.
[I 2021-05-10 19:48:39,365] Trial 3854 pruned.
[I 2021-05-10 19:48:41,370] Trial 3855 pruned.
[I 2021-05-10 19:48:42,115] Trial 3856 pruned.
[I 2021-05-10 19:48:42,857] Trial 3857 pruned.
[I 2021-05-10 19:48:43,056] Trial 3858 pruned.
[I 2021-05-10 19:48:43,343] Trial 3859 pruned.
[I 2021-05-10 19:48:43,958] Trial 3860 pruned.
[I 2021-05-10 19:48:44,709] Trial 3861 pruned.
[I 2021-05-10 19:48:45,451] Trial 3862 pruned.
[I 2021-05-10 19:48:46,211] Trial 3863 pruned.
[I 2021-05-10 19:48:46,966] Trial 3864 pruned.
[I 2021-05-10 19:48:48,282] Trial 3865 pruned.
[I 2021-05-10 19:48:49,024] Trial 3866 pruned.
[I 2021-05-10 19:48:49,771] Trial 3867 pruned.
[I 2021-05-10 19:48:49,968] Trial 3868 pruned.
[I 2021-05-10 19:48:50,719] Trial 3869 pruned.
[I 2021-05-10 19:48:51,464] Trial 3870 pruned.
[I 2021-05-10 19:48:52,204] Trial 3871 pruned.
[I 2021-05-10 19:48:52,996] Trial 3872 pruned.
[I 2021-05-10 19:48:53,740] Trial 3873 pruned.
[I 2021-05-10 19:48:54,486] Trial 3874 pruned.
[I 2021-05-10 19:48:55,168] Trial 3875 pruned.
[I 2021-05-10 19:48:55,381] Trial 3876 pruned.
[I 2021-05-10 19:48:56,121] Trial 3877 pruned.
[I 2021-05-10 19:48:56,412] Trial 3878 pruned.
[I 2021-05-10 19:48:57,163] Trial 3879 pruned.
[I 2021-05-10 19:48:57,916] Trial 3880 pruned.
[I 2021-05-10 19:48:58,662] Trial 3881 pruned.
[I 2021-05-10 19:48:59,426] Trial 3882 pruned.
[I 2021-05-10 19:49:00,189] Trial 3883 pruned.
[I 2021-05-10 19:49:00,880] Trial 3884 pruned.
[I 2021-05-10 19:49:01,649] Trial 3885 pruned.
[I 2021-05-10 19:49:01,865] Trial 3886 pruned.
[I 2021-05-10 19:49:02,590] Trial 3887 pruned.
[I 2021-05-10 19:49:03,272] Trial 3888 pruned.
[I 2021-05-10 19:49:04,005] Trial 3889 pruned.
[I 2021-05-10 19:49:04,760] Trial 3890 pruned.
[I 2021-05-10 19:49:05,517] Trial 3891 pruned.
[I 2021-05-10 19:49:06,270] Trial 3892 pruned.
[I 2021-05-10 19:49:07,021] Trial 3893 pruned.
[I 2021-05-10 19:49:07,623] Trial 3894 pruned.
[I 2021-05-10 19:49:07,843] Trial 3895 pruned.
[I 2021-05-10 19:49:08,594] Trial 3896 pruned.
[I 2021-05-10 19:49:09,272] Trial 3897 pruned.
[I 2021-05-10 19:49:09,562] Trial 3898 pruned.
[I 2021-05-10 19:49:10,312] Trial 3899 pruned.
[I 2021-05-10 19:49:10,995] Trial 3900 pruned.
[I 2021-05-10 19:49:11,747] Trial 3901 pruned.
[I 2021-05-10 19:49:12,501] Trial 3902 pruned.
[I 2021-05-10 19:49:13,244] Trial 3903 pruned.
[I 2021-05-10 19:49:13,986] Trial 3904 pruned.
[I 2021-05-10 19:49:14,201] Trial 3905 pruned.
[I 2021-05-10 19:49:14,953] Trial 3906 pruned.
[I 2021-05-10 19:49:15,706] Trial 3907 pruned.
[I 2021-05-10 19:49:16,452] Trial 3908 pruned.
[I 2021-05-10 19:49:17,197] Trial 3909 pruned.
[I 2021-05-10 19:49:17,962] Trial 3910 pruned.
[I 2021-05-10 19:49:18,700] Trial 3911 pruned.
[I 2021-05-10 19:49:19,457] Trial 3912 pruned.
[I 2021-05-10 19:49:20,125] Trial 3913 pruned.
[I 2021-05-10 19:49:20,332] Trial 3914 pruned.
[I 2021-05-10 19:49:21,012] Trial 3915 pruned.
[I 2021-05-10 19:49:21,302] Trial 3916 pruned.
[I 2021-05-10 19:49:22,051] Trial 3917 pruned.
[I 2021-05-10 19:49:22,799] Trial 3918 pruned.
[I 2021-05-10 19:49:23,562] Trial 3919 pruned.
[I 2021-05-10 19:49:24,289] Trial 3920 pruned.
[I 2021-05-10 19:49:25,041] Trial 3921 pruned.
[I 2021-05-10 19:49:25,769] Trial 3922 pruned.
[I 2021-05-10 19:49:26,517] Trial 3923 pruned.
[I 2021-05-10 19:49:26,733] Trial 3924 pruned.
[I 2021-05-10 19:49:27,480] Trial 3925 pruned.
[I 2021-05-10 19:49:28,124] Trial 3926 pruned.
[I 2021-05-10 19:49:28,882] Trial 3927 pruned.
[I 2021-05-10 19:49:29,642] Trial 3928 pruned.
[I 2021-05-10 19:49:30,394] Trial 3929 pruned.
[I 2021-05-10 19:49:36,169] Trial 3930 pruned.
[I 2021-05-10 19:49:36,902] Trial 3931 pruned.
[I 2021-05-10 19:49:37,607] Trial 3932 pruned.
[I 2021-05-10 19:49:37,794] Trial 3933 pruned.
[I 2021-05-10 19:49:38,510] Trial 3934 pruned.
[I 2021-05-10 19:49:39,240] Trial 3935 pruned.
[I 2021-05-10 19:49:39,522] Trial 3936 pruned.
[I 2021-05-10 19:49:40,114] Trial 3937 pruned.
[I 2021-05-10 19:49:40,825] Trial 3938 pruned.
[I 2021-05-10 19:49:41,538] Trial 3939 pruned.
[I 2021-05-10 19:49:42,195] Trial 3940 pruned.
[I 2021-05-10 19:49:42,926] Trial 3941 pruned.
[I 2021-05-10 19:49:43,671] Trial 3942 pruned.
[I 2021-05-10 19:49:43,898] Trial 3943 pruned.
[I 2021-05-10 19:49:44,695] Trial 3944 pruned.
[I 2021-05-10 19:49:45,501] Trial 3945 pruned.
[I 2021-05-10 19:49:47,481] Trial 3946 pruned.
[I 2021-05-10 19:49:48,205] Trial 3947 pruned.
[I 2021-05-10 19:49:48,931] Trial 3948 pruned.
[I 2021-05-10 19:49:49,558] Trial 3949 pruned.
[I 2021-05-10 19:49:49,979] Trial 3950 pruned.
[I 2021-05-10 19:49:50,701] Trial 3951 pruned.
[I 2021-05-10 19:49:51,125] Trial 3952 pruned.
[I 2021-05-10 19:49:51,857] Trial 3953 pruned.
[I 2021-05-10 19:49:52,584] Trial 3954 pruned.
[I 2021-05-10 19:49:52,797] Trial 3955 pruned.
[I 2021-05-10 19:49:53,192] Trial 3956 pruned.
[I 2021-05-10 19:49:53,488] Trial 3957 pruned.
[I 2021-05-10 19:49:54,238] Trial 3958 pruned.
[I 2021-05-10 19:49:54,678] Trial 3959 pruned.
[I 2021-05-10 19:49:55,429] Trial 3960 pruned.
[I 2021-05-10 19:49:56,168] Trial 3961 pruned.
[I 2021-05-10 19:49:56,608] Trial 3962 pruned.
[I 2021-05-10 19:49:57,156] Trial 3963 pruned.
[I 2021-05-10 19:49:57,594] Trial 3964 pruned.
[I 2021-05-10 19:49:58,257] Trial 3965 pruned.
[I 2021-05-10 19:49:58,987] Trial 3966 pruned.
[I 2021-05-10 19:49:59,416] Trial 3967 pruned.
[I 2021-05-10 19:50:00,744] Trial 3968 pruned.
[I 2021-05-10 19:50:01,465] Trial 3969 pruned.
[I 2021-05-10 19:50:01,678] Trial 3970 pruned.
[I 2021-05-10 19:50:02,059] Trial 3971 pruned.
[I 2021-05-10 19:50:02,769] Trial 3972 pruned.
[I 2021-05-10 19:50:03,188] Trial 3973 pruned.
[I 2021-05-10 19:50:03,927] Trial 3974 pruned.
[I 2021-05-10 19:50:04,650] Trial 3975 pruned.
[I 2021-05-10 19:50:05,084] Trial 3976 pruned.
[I 2021-05-10 19:50:05,676] Trial 3977 pruned.
[I 2021-05-10 19:50:06,073] Trial 3978 pruned.
[I 2021-05-10 19:50:06,788] Trial 3979 pruned.
[I 2021-05-10 19:50:08,083] Trial 3980 pruned.
[I 2021-05-10 19:50:08,494] Trial 3981 pruned.
[I 2021-05-10 19:50:09,194] Trial 3982 pruned.
[I 2021-05-10 19:50:09,401] Trial 3983 pruned.
[I 2021-05-10 19:50:10,017] Trial 3984 pruned.
[I 2021-05-10 19:50:10,432] Trial 3985 pruned.
[I 2021-05-10 19:50:11,125] Trial 3986 pruned.
[I 2021-05-10 19:50:11,406] Trial 3987 pruned.
[I 2021-05-10 19:50:11,823] Trial 3988 pruned.
[I 2021-05-10 19:50:12,528] Trial 3989 pruned.
[I 2021-05-10 19:50:13,235] Trial 3990 pruned.
[I 2021-05-10 19:50:13,654] Trial 3991 pruned.
[I 2021-05-10 19:50:14,360] Trial 3992 pruned.
[I 2021-05-10 19:50:14,725] Trial 3993 pruned.
[I 2021-05-10 19:50:15,442] Trial 3994 pruned.
[I 2021-05-10 19:50:16,139] Trial 3995 pruned.
[I 2021-05-10 19:50:16,560] Trial 3996 pruned.
[I 2021-05-10 19:50:17,245] Trial 3997 pruned.
[I 2021-05-10 19:50:17,955] Trial 3998 pruned.
[I 2021-05-10 19:50:18,370] Trial 3999 pruned.
[I 2021-05-10 19:50:18,559] Trial 4000 pruned.
[I 2021-05-10 19:50:19,267] Trial 4001 pruned.
[I 2021-05-10 19:50:19,682] Trial 4002 pruned.
[I 2021-05-10 19:50:20,392] Trial 4003 pruned.
[I 2021-05-10 19:50:21,101] Trial 4004 pruned.
[I 2021-05-10 19:50:21,538] Trial 4005 pruned.
[I 2021-05-10 19:50:22,230] Trial 4006 pruned.
[I 2021-05-10 19:50:22,614] Trial 4007 pruned.
[I 2021-05-10 19:50:23,313] Trial 4008 pruned.
[I 2021-05-10 19:50:26,352] Trial 4009 pruned.
[I 2021-05-10 19:50:26,771] Trial 4010 pruned.
[I 2021-05-10 19:50:27,477] Trial 4011 pruned.
[I 2021-05-10 19:50:27,683] Trial 4012 pruned.
[I 2021-05-10 19:50:28,368] Trial 4013 pruned.
[I 2021-05-10 19:50:28,775] Trial 4014 pruned.
[I 2021-05-10 19:50:29,032] Trial 4015 pruned.
[I 2021-05-10 19:50:29,733] Trial 4016 pruned.
[I 2021-05-10 19:50:30,154] Trial 4017 pruned.
[I 2021-05-10 19:50:30,863] Trial 4018 pruned.
[I 2021-05-10 19:50:31,571] Trial 4019 pruned.
[I 2021-05-10 19:50:31,937] Trial 4020 pruned.
[I 2021-05-10 19:50:32,658] Trial 4021 pruned.
[I 2021-05-10 19:50:33,075] Trial 4022 pruned.
[I 2021-05-10 19:50:33,785] Trial 4023 pruned.
[I 2021-05-10 19:50:34,384] Trial 4024 pruned.
[I 2021-05-10 19:50:34,791] Trial 4025 pruned.
[I 2021-05-10 19:50:35,499] Trial 4026 pruned.
[I 2021-05-10 19:50:35,706] Trial 4027 pruned.
[I 2021-05-10 19:50:36,413] Trial 4028 pruned.
[I 2021-05-10 19:50:36,826] Trial 4029 pruned.
[I 2021-05-10 19:50:37,445] Trial 4030 pruned.
[I 2021-05-10 19:50:37,872] Trial 4031 pruned.
[I 2021-05-10 19:50:38,579] Trial 4032 pruned.
[I 2021-05-10 19:50:39,289] Trial 4033 pruned.
[I 2021-05-10 19:50:39,653] Trial 4034 pruned.
[I 2021-05-10 19:50:40,350] Trial 4035 pruned.
[I 2021-05-10 19:50:40,765] Trial 4036 pruned.
[I 2021-05-10 19:50:41,470] Trial 4037 pruned.
[I 2021-05-10 19:50:42,158] Trial 4038 pruned.
[I 2021-05-10 19:50:42,585] Trial 4039 pruned.
[I 2021-05-10 19:50:43,291] Trial 4040 pruned.
[I 2021-05-10 19:50:43,496] Trial 4041 pruned.
[I 2021-05-10 19:50:44,198] Trial 4042 pruned.
[I 2021-05-10 19:50:44,610] Trial 4043 pruned.
[I 2021-05-10 19:50:44,880] Trial 4044 pruned.
[I 2021-05-10 19:50:45,518] Trial 4045 pruned.
[I 2021-05-10 19:50:45,934] Trial 4046 pruned.
[I 2021-05-10 19:50:46,645] Trial 4047 pruned.
[I 2021-05-10 19:50:47,352] Trial 4048 pruned.
[I 2021-05-10 19:50:47,769] Trial 4049 pruned.
[I 2021-05-10 19:50:48,395] Trial 4050 pruned.
[I 2021-05-10 19:50:48,816] Trial 4051 pruned.
[I 2021-05-10 19:50:49,513] Trial 4052 pruned.
[I 2021-05-10 19:50:50,221] Trial 4053 pruned.
[I 2021-05-10 19:50:50,633] Trial 4054 pruned.
[I 2021-05-10 19:50:51,345] Trial 4055 pruned.
[I 2021-05-10 19:50:51,554] Trial 4056 pruned.
[I 2021-05-10 19:50:52,181] Trial 4057 pruned.
[I 2021-05-10 19:50:52,652] Trial 4058 pruned.
[I 2021-05-10 19:50:53,351] Trial 4059 pruned.
[I 2021-05-10 19:50:53,765] Trial 4060 pruned.
[I 2021-05-10 19:50:54,476] Trial 4061 pruned.
[I 2021-05-10 19:50:55,178] Trial 4062 pruned.
[I 2021-05-10 19:50:55,596] Trial 4063 pruned.
[I 2021-05-10 19:50:56,310] Trial 4064 pruned.
[I 2021-05-10 19:50:56,722] Trial 4065 pruned.
[I 2021-05-10 19:50:57,436] Trial 4066 pruned.
[I 2021-05-10 19:50:58,061] Trial 4067 pruned.
[I 2021-05-10 19:50:58,477] Trial 4068 pruned.
[I 2021-05-10 19:50:59,198] Trial 4069 pruned.
[I 2021-05-10 19:50:59,381] Trial 4070 pruned.
[I 2021-05-10 19:51:00,095] Trial 4071 pruned.
[I 2021-05-10 19:51:00,528] Trial 4072 pruned.
[I 2021-05-10 19:51:01,243] Trial 4073 pruned.
[I 2021-05-10 19:51:01,690] Trial 4074 pruned.
[I 2021-05-10 19:51:02,396] Trial 4075 pruned.
[I 2021-05-10 19:51:02,693] Trial 4076 pruned.
[I 2021-05-10 19:51:03,410] Trial 4077 pruned.
[I 2021-05-10 19:51:03,737] Trial 4078 pruned.
[I 2021-05-10 19:51:55,223] Trial 4079 finished with value: 162.74560546875 and parameters: {'lr': 0.0015123505432637157, 'batch_size': 16, 'n_layers': 4, 'neurons_HL1': 1006, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:51:55,639] Trial 4080 pruned.
[I 2021-05-10 19:51:56,339] Trial 4081 pruned.
[I 2021-05-10 19:51:57,051] Trial 4082 pruned.
[I 2021-05-10 19:51:57,475] Trial 4083 pruned.
[I 2021-05-10 19:51:58,189] Trial 4084 pruned.
[I 2021-05-10 19:51:58,385] Trial 4085 pruned.
[I 2021-05-10 19:51:59,101] Trial 4086 pruned.
[I 2021-05-10 19:51:59,406] Trial 4087 pruned.
[I 2021-05-10 19:52:03,573] Trial 4088 pruned.
[I 2021-05-10 19:52:03,995] Trial 4089 pruned.
[I 2021-05-10 19:52:04,705] Trial 4090 pruned.
[I 2021-05-10 19:52:05,413] Trial 4091 pruned.
[I 2021-05-10 19:52:05,822] Trial 4092 pruned.
[I 2021-05-10 19:52:06,533] Trial 4093 pruned.
[I 2021-05-10 19:52:06,948] Trial 4094 pruned.
[I 2021-05-10 19:52:08,233] Trial 4095 pruned.
[I 2021-05-10 19:52:08,867] Trial 4096 pruned.
[I 2021-05-10 19:52:09,283] Trial 4097 pruned.
[I 2021-05-10 19:52:10,005] Trial 4098 pruned.
[I 2021-05-10 19:52:10,212] Trial 4099 pruned.
[I 2021-05-10 19:52:10,920] Trial 4100 pruned.
[I 2021-05-10 19:52:11,296] Trial 4101 pruned.
[I 2021-05-10 19:52:11,572] Trial 4102 pruned.
[I 2021-05-10 19:52:12,275] Trial 4103 pruned.
[I 2021-05-10 19:52:12,698] Trial 4104 pruned.
[I 2021-05-10 19:52:13,420] Trial 4105 pruned.
[I 2021-05-10 19:52:13,996] Trial 4106 pruned.
[I 2021-05-10 19:52:14,414] Trial 4107 pruned.
[I 2021-05-10 19:52:15,119] Trial 4108 pruned.
[I 2021-05-10 19:52:15,539] Trial 4109 pruned.
[I 2021-05-10 19:52:16,253] Trial 4110 pruned.
[I 2021-05-10 19:52:16,879] Trial 4111 pruned.
[I 2021-05-10 19:52:17,288] Trial 4112 pruned.
[I 2021-05-10 19:52:18,015] Trial 4113 pruned.
[I 2021-05-10 19:52:18,227] Trial 4114 pruned.
[I 2021-05-10 19:52:19,544] Trial 4115 pruned.
[I 2021-05-10 19:52:19,968] Trial 4116 pruned.
[I 2021-05-10 19:52:20,681] Trial 4117 pruned.
[I 2021-05-10 19:52:21,101] Trial 4118 pruned.
[I 2021-05-10 19:52:21,728] Trial 4119 pruned.
[I 2021-05-10 19:52:22,421] Trial 4120 pruned.
[I 2021-05-10 19:52:22,837] Trial 4121 pruned.
[I 2021-05-10 19:52:24,140] Trial 4122 pruned.
[I 2021-05-10 19:52:24,552] Trial 4123 pruned.
[I 2021-05-10 19:52:25,263] Trial 4124 pruned.
[I 2021-05-10 19:52:25,972] Trial 4125 pruned.
[I 2021-05-10 19:52:26,691] Trial 4126 pruned.
[I 2021-05-10 19:52:27,321] Trial 4127 pruned.
[I 2021-05-10 19:52:27,528] Trial 4128 pruned.
[I 2021-05-10 19:52:29,424] Trial 4129 pruned.
[I 2021-05-10 19:52:29,840] Trial 4130 pruned.
[I 2021-05-10 19:52:30,550] Trial 4131 pruned.
[I 2021-05-10 19:52:30,824] Trial 4132 pruned.
[I 2021-05-10 19:52:31,197] Trial 4133 pruned.
[I 2021-05-10 19:52:31,906] Trial 4134 pruned.
[I 2021-05-10 19:52:32,611] Trial 4135 pruned.
[I 2021-05-10 19:52:33,037] Trial 4136 pruned.
[I 2021-05-10 19:52:33,753] Trial 4137 pruned.
[I 2021-05-10 19:52:34,176] Trial 4138 pruned.
[I 2021-05-10 19:52:34,890] Trial 4139 pruned.
[I 2021-05-10 19:52:35,514] Trial 4140 pruned.
[I 2021-05-10 19:52:35,938] Trial 4141 pruned.
[I 2021-05-10 19:52:36,652] Trial 4142 pruned.
[I 2021-05-10 19:52:36,864] Trial 4143 pruned.
[I 2021-05-10 19:52:38,149] Trial 4144 pruned.
[I 2021-05-10 19:52:38,531] Trial 4145 pruned.
[I 2021-05-10 19:52:39,102] Trial 4146 pruned.
[I 2021-05-10 19:52:39,523] Trial 4147 pruned.
[I 2021-05-10 19:52:40,822] Trial 4148 pruned.
[I 2021-05-10 19:52:41,527] Trial 4149 pruned.
[I 2021-05-10 19:52:41,949] Trial 4150 pruned.
[I 2021-05-10 19:52:42,656] Trial 4151 pruned.
[I 2021-05-10 19:52:43,083] Trial 4152 pruned.
[I 2021-05-10 19:53:41,742] Trial 4153 finished with value: 165.1366729736328 and parameters: {'lr': 0.0017827989883964198, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 1024, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:53:42,459] Trial 4154 pruned.
[I 2021-05-10 19:53:42,828] Trial 4155 pruned.
[I 2021-05-10 19:53:43,545] Trial 4156 pruned.
[I 2021-05-10 19:53:43,754] Trial 4157 pruned.
[I 2021-05-10 19:53:44,460] Trial 4158 pruned.
[I 2021-05-10 19:53:44,888] Trial 4159 pruned.
[I 2021-05-10 19:53:45,608] Trial 4160 pruned.
[I 2021-05-10 19:53:45,994] Trial 4161 pruned.
[I 2021-05-10 19:53:46,267] Trial 4162 pruned.
[I 2021-05-10 19:53:46,970] Trial 4163 pruned.
[I 2021-05-10 19:53:47,694] Trial 4164 pruned.
[I 2021-05-10 19:53:48,117] Trial 4165 pruned.
[I 2021-05-10 19:53:48,813] Trial 4166 pruned.
[I 2021-05-10 19:53:49,240] Trial 4167 pruned.
[I 2021-05-10 19:53:49,939] Trial 4168 pruned.
[I 2021-05-10 19:53:51,235] Trial 4169 pruned.
[I 2021-05-10 19:53:51,620] Trial 4170 pruned.
[I 2021-05-10 19:53:52,328] Trial 4171 pruned.
[I 2021-05-10 19:53:52,536] Trial 4172 pruned.
[I 2021-05-10 19:53:53,239] Trial 4173 pruned.
[I 2021-05-10 19:53:53,663] Trial 4174 pruned.
[I 2021-05-10 19:53:54,364] Trial 4175 pruned.
[I 2021-05-10 19:53:54,747] Trial 4176 pruned.
[I 2021-05-10 19:53:55,462] Trial 4177 pruned.
[I 2021-05-10 19:53:56,178] Trial 4178 pruned.
[I 2021-05-10 19:53:56,599] Trial 4179 pruned.
[I 2021-05-10 19:53:57,298] Trial 4180 pruned.
[I 2021-05-10 19:53:57,710] Trial 4181 pruned.
[I 2021-05-10 19:53:58,405] Trial 4182 pruned.
[I 2021-05-10 19:53:59,061] Trial 4183 pruned.
[I 2021-05-10 19:53:59,485] Trial 4184 pruned.
[I 2021-05-10 19:54:00,205] Trial 4185 pruned.
[I 2021-05-10 19:54:00,404] Trial 4186 pruned.
[I 2021-05-10 19:54:01,693] Trial 4187 pruned.
[I 2021-05-10 19:54:02,116] Trial 4188 pruned.
[I 2021-05-10 19:54:02,396] Trial 4189 pruned.
[I 2021-05-10 19:54:03,101] Trial 4190 pruned.
[I 2021-05-10 19:54:03,530] Trial 4191 pruned.
[I 2021-05-10 19:54:04,224] Trial 4192 pruned.
[I 2021-05-10 19:54:04,934] Trial 4193 pruned.
[I 2021-05-10 19:54:05,368] Trial 4194 pruned.
[I 2021-05-10 19:54:06,066] Trial 4195 pruned.
[I 2021-05-10 19:54:06,486] Trial 4196 pruned.
[I 2021-05-10 19:54:07,126] Trial 4197 pruned.
[I 2021-05-10 19:54:07,825] Trial 4198 pruned.
[I 2021-05-10 19:54:08,246] Trial 4199 pruned.
[I 2021-05-10 19:54:08,955] Trial 4200 pruned.
[I 2021-05-10 19:54:09,156] Trial 4201 pruned.
[I 2021-05-10 19:54:09,875] Trial 4202 pruned.
[I 2021-05-10 19:54:10,259] Trial 4203 pruned.
[I 2021-05-10 19:54:10,974] Trial 4204 pruned.
[I 2021-05-10 19:54:11,398] Trial 4205 pruned.
[I 2021-05-10 19:54:12,111] Trial 4206 pruned.
[I 2021-05-10 19:54:12,695] Trial 4207 pruned.
[I 2021-05-10 19:54:13,115] Trial 4208 pruned.
[I 2021-05-10 19:54:13,824] Trial 4209 pruned.
[I 2021-05-10 19:54:14,242] Trial 4210 pruned.
[I 2021-05-10 19:54:14,951] Trial 4211 pruned.
[I 2021-05-10 19:54:15,650] Trial 4212 pruned.
[I 2021-05-10 19:54:16,073] Trial 4213 pruned.
[I 2021-05-10 19:54:16,704] Trial 4214 pruned.
[I 2021-05-10 19:54:17,416] Trial 4215 pruned.
[I 2021-05-10 19:54:17,626] Trial 4216 pruned.
[I 2021-05-10 19:54:18,058] Trial 4217 pruned.
[I 2021-05-10 19:54:18,341] Trial 4218 pruned.
[I 2021-05-10 19:54:18,982] Trial 4219 pruned.
[I 2021-05-10 19:54:19,403] Trial 4220 pruned.
[I 2021-05-10 19:54:20,118] Trial 4221 pruned.
[I 2021-05-10 19:54:20,826] Trial 4222 pruned.
[I 2021-05-10 19:54:21,249] Trial 4223 pruned.
[I 2021-05-10 19:54:21,886] Trial 4224 pruned.
[I 2021-05-10 19:54:22,308] Trial 4225 pruned.
[I 2021-05-10 19:54:23,001] Trial 4226 pruned.
[I 2021-05-10 19:54:23,702] Trial 4227 pruned.
[I 2021-05-10 19:54:24,123] Trial 4228 pruned.
[I 2021-05-10 19:54:24,829] Trial 4229 pruned.
[I 2021-05-10 19:54:25,038] Trial 4230 pruned.
[I 2021-05-10 19:54:25,751] Trial 4231 pruned.
[I 2021-05-10 19:54:26,178] Trial 4232 pruned.
[I 2021-05-10 19:54:26,827] Trial 4233 pruned.
[I 2021-05-10 19:54:27,257] Trial 4234 pruned.
[I 2021-05-10 19:54:28,533] Trial 4235 pruned.
[I 2021-05-10 19:54:29,246] Trial 4236 pruned.
[I 2021-05-10 19:54:29,672] Trial 4237 pruned.
[I 2021-05-10 19:54:30,373] Trial 4238 pruned.
[I 2021-05-10 19:54:30,724] Trial 4239 pruned.
[I 2021-05-10 19:54:31,423] Trial 4240 pruned.
[I 2021-05-10 19:54:32,048] Trial 4241 pruned.
[I 2021-05-10 19:54:32,483] Trial 4242 pruned.
[I 2021-05-10 19:54:33,203] Trial 4243 pruned.
[I 2021-05-10 19:54:33,417] Trial 4244 pruned.
[I 2021-05-10 19:54:34,134] Trial 4245 pruned.
[I 2021-05-10 19:54:34,564] Trial 4246 pruned.
[I 2021-05-10 19:54:34,814] Trial 4247 pruned.
[I 2021-05-10 19:54:35,536] Trial 4248 pruned.
[I 2021-05-10 19:54:35,955] Trial 4249 pruned.
[I 2021-05-10 19:54:36,670] Trial 4250 pruned.
[I 2021-05-10 19:54:37,389] Trial 4251 pruned.
[I 2021-05-10 19:54:37,829] Trial 4252 pruned.
[I 2021-05-10 19:54:38,534] Trial 4253 pruned.
[I 2021-05-10 19:54:38,947] Trial 4254 pruned.
[I 2021-05-10 19:54:39,646] Trial 4255 pruned.
[I 2021-05-10 19:54:40,289] Trial 4256 pruned.
[I 2021-05-10 19:54:40,713] Trial 4257 pruned.
[I 2021-05-10 19:54:41,426] Trial 4258 pruned.
[I 2021-05-10 19:54:41,612] Trial 4259 pruned.
[I 2021-05-10 19:54:42,345] Trial 4260 pruned.
[I 2021-05-10 19:54:42,765] Trial 4261 pruned.
[I 2021-05-10 19:54:43,469] Trial 4262 pruned.
[I 2021-05-10 19:54:43,894] Trial 4263 pruned.
[I 2021-05-10 19:54:44,610] Trial 4264 pruned.
[I 2021-05-10 19:54:45,326] Trial 4265 pruned.
[I 2021-05-10 19:54:45,753] Trial 4266 pruned.
[I 2021-05-10 19:54:46,448] Trial 4267 pruned.
[I 2021-05-10 19:54:46,874] Trial 4268 pruned.
[I 2021-05-10 19:54:47,579] Trial 4269 pruned.
[I 2021-05-10 19:54:48,229] Trial 4270 pruned.
[I 2021-05-10 19:54:48,655] Trial 4271 pruned.
[I 2021-05-10 19:54:49,377] Trial 4272 pruned.
[I 2021-05-10 19:54:49,583] Trial 4273 pruned.
[I 2021-05-10 19:54:50,299] Trial 4274 pruned.
[I 2021-05-10 19:54:50,727] Trial 4275 pruned.
[I 2021-05-10 19:54:51,005] Trial 4276 pruned.
[I 2021-05-10 19:54:51,645] Trial 4277 pruned.
[I 2021-05-10 19:54:52,072] Trial 4278 pruned.
[I 2021-05-10 19:54:52,809] Trial 4279 pruned.
[I 2021-05-10 19:54:53,549] Trial 4280 pruned.
[I 2021-05-10 19:54:53,964] Trial 4281 pruned.
[I 2021-05-10 19:54:54,654] Trial 4282 pruned.
[I 2021-05-10 19:54:55,049] Trial 4283 pruned.
[I 2021-05-10 19:54:55,761] Trial 4284 pruned.
[I 2021-05-10 19:54:56,478] Trial 4285 pruned.
[I 2021-05-10 19:54:56,895] Trial 4286 pruned.
[I 2021-05-10 19:54:57,408] Trial 4287 pruned.
[I 2021-05-10 19:54:58,116] Trial 4288 pruned.
[I 2021-05-10 19:54:58,548] Trial 4289 pruned.
[I 2021-05-10 19:54:58,762] Trial 4290 pruned.
[I 2021-05-10 19:54:59,488] Trial 4291 pruned.
[I 2021-05-10 19:54:59,881] Trial 4292 pruned.
[I 2021-05-10 19:55:00,619] Trial 4293 pruned.
[I 2021-05-10 19:55:01,350] Trial 4294 pruned.
[I 2021-05-10 19:55:01,765] Trial 4295 pruned.
[I 2021-05-10 19:55:03,087] Trial 4296 pruned.
[I 2021-05-10 19:55:03,516] Trial 4297 pruned.
[I 2021-05-10 19:55:04,224] Trial 4298 pruned.
[I 2021-05-10 19:55:04,934] Trial 4299 pruned.
[I 2021-05-10 19:55:05,310] Trial 4300 pruned.
[I 2021-05-10 19:55:06,025] Trial 4301 pruned.
[I 2021-05-10 19:55:06,742] Trial 4302 pruned.
[I 2021-05-10 19:55:06,959] Trial 4303 pruned.
[I 2021-05-10 19:55:07,380] Trial 4304 pruned.
[I 2021-05-10 19:55:07,628] Trial 4305 pruned.
[I 2021-05-10 19:55:08,349] Trial 4306 pruned.
[I 2021-05-10 19:55:08,776] Trial 4307 pruned.
[I 2021-05-10 19:55:09,486] Trial 4308 pruned.
[I 2021-05-10 19:55:10,074] Trial 4309 pruned.
[I 2021-05-10 19:55:10,499] Trial 4310 pruned.
[I 2021-05-10 19:55:11,206] Trial 4311 pruned.
[I 2021-05-10 19:55:11,633] Trial 4312 pruned.
[I 2021-05-10 19:55:12,283] Trial 4313 pruned.
[I 2021-05-10 19:55:12,997] Trial 4314 pruned.
[I 2021-05-10 19:55:13,428] Trial 4315 pruned.
[I 2021-05-10 19:55:14,130] Trial 4316 pruned.
[I 2021-05-10 19:55:14,336] Trial 4317 pruned.
[I 2021-05-10 19:55:15,645] Trial 4318 pruned.
[I 2021-05-10 19:55:16,373] Trial 4319 pruned.
[I 2021-05-10 19:55:18,269] Trial 4320 pruned.
[I 2021-05-10 19:55:18,688] Trial 4321 pruned.
[I 2021-05-10 19:55:19,399] Trial 4322 pruned.
[I 2021-05-10 19:55:20,064] Trial 4323 pruned.
[I 2021-05-10 19:55:20,486] Trial 4324 pruned.
[I 2021-05-10 19:55:21,774] Trial 4325 pruned.
[I 2021-05-10 19:55:22,203] Trial 4326 pruned.
[I 2021-05-10 19:55:22,926] Trial 4327 pruned.
[I 2021-05-10 19:55:23,639] Trial 4328 pruned.
[I 2021-05-10 19:55:24,067] Trial 4329 pruned.
[I 2021-05-10 19:55:24,709] Trial 4330 pruned.
[I 2021-05-10 19:55:24,920] Trial 4331 pruned.
[I 2021-05-10 19:55:25,622] Trial 4332 pruned.
[I 2021-05-10 19:55:26,051] Trial 4333 pruned.
[I 2021-05-10 19:55:26,763] Trial 4334 pruned.
[I 2021-05-10 19:55:27,115] Trial 4335 pruned.
[I 2021-05-10 19:55:27,405] Trial 4336 pruned.
[I 2021-05-10 19:55:28,123] Trial 4337 pruned.
[I 2021-05-10 19:55:28,780] Trial 4338 pruned.
[I 2021-05-10 19:55:29,205] Trial 4339 pruned.
[I 2021-05-10 19:55:29,921] Trial 4340 pruned.
[I 2021-05-10 19:55:30,346] Trial 4341 pruned.
[I 2021-05-10 19:55:31,071] Trial 4342 pruned.
[I 2021-05-10 19:55:31,790] Trial 4343 pruned.
[I 2021-05-10 19:55:32,221] Trial 4344 pruned.
[I 2021-05-10 19:55:32,871] Trial 4345 pruned.
[I 2021-05-10 19:55:33,089] Trial 4346 pruned.
[I 2021-05-10 19:55:33,798] Trial 4347 pruned.
[I 2021-05-10 19:55:34,226] Trial 4348 pruned.
[I 2021-05-10 19:55:34,933] Trial 4349 pruned.
[I 2021-05-10 19:55:35,362] Trial 4350 pruned.
[I 2021-05-10 19:55:36,085] Trial 4351 pruned.
[I 2021-05-10 19:55:36,805] Trial 4352 pruned.
[I 2021-05-10 19:55:37,198] Trial 4353 pruned.
[I 2021-05-10 19:55:37,901] Trial 4354 pruned.
[I 2021-05-10 19:55:38,332] Trial 4355 pruned.
[I 2021-05-10 19:55:39,032] Trial 4356 pruned.
[I 2021-05-10 19:55:39,749] Trial 4357 pruned.
[I 2021-05-10 19:55:40,177] Trial 4358 pruned.
[I 2021-05-10 19:55:40,812] Trial 4359 pruned.
[I 2021-05-10 19:55:41,029] Trial 4360 pruned.
[I 2021-05-10 19:55:41,738] Trial 4361 pruned.
[I 2021-05-10 19:55:42,165] Trial 4362 pruned.
[I 2021-05-10 19:55:42,864] Trial 4363 pruned.
[I 2021-05-10 19:55:43,146] Trial 4364 pruned.
[I 2021-05-10 19:55:43,568] Trial 4365 pruned.
[I 2021-05-10 19:55:44,186] Trial 4366 pruned.
[I 2021-05-10 19:55:44,903] Trial 4367 pruned.
[I 2021-05-10 19:55:45,324] Trial 4368 pruned.
[I 2021-05-10 19:55:46,059] Trial 4369 pruned.
[I 2021-05-10 19:55:46,484] Trial 4370 pruned.
[I 2021-05-10 19:55:47,785] Trial 4371 pruned.
[I 2021-05-10 19:55:48,405] Trial 4372 pruned.
[I 2021-05-10 19:55:48,836] Trial 4373 pruned.
[I 2021-05-10 19:55:49,542] Trial 4374 pruned.
[I 2021-05-10 19:55:50,256] Trial 4375 pruned.
[I 2021-05-10 19:55:50,679] Trial 4376 pruned.
[I 2021-05-10 19:55:51,396] Trial 4377 pruned.
[I 2021-05-10 19:55:51,606] Trial 4378 pruned.
[I 2021-05-10 19:55:51,984] Trial 4379 pruned.
[I 2021-05-10 19:55:52,698] Trial 4380 pruned.
[I 2021-05-10 19:55:53,411] Trial 4381 pruned.
[I 2021-05-10 19:55:53,843] Trial 4382 pruned.
[I 2021-05-10 19:55:54,548] Trial 4383 pruned.
[I 2021-05-10 19:55:54,978] Trial 4384 pruned.
[I 2021-05-10 19:55:55,690] Trial 4385 pruned.
[I 2021-05-10 19:55:56,317] Trial 4386 pruned.
[I 2021-05-10 19:55:56,748] Trial 4387 pruned.
[I 2021-05-10 19:55:57,468] Trial 4388 pruned.
[I 2021-05-10 19:55:58,178] Trial 4389 pruned.
[I 2021-05-10 19:55:58,391] Trial 4390 pruned.
[I 2021-05-10 19:55:58,746] Trial 4391 pruned.
[I 2021-05-10 19:55:59,470] Trial 4392 pruned.
[I 2021-05-10 19:55:59,759] Trial 4393 pruned.
[I 2021-05-10 19:56:00,188] Trial 4394 pruned.
[I 2021-05-10 19:56:00,894] Trial 4395 pruned.
[I 2021-05-10 19:56:01,626] Trial 4396 pruned.
[I 2021-05-10 19:56:02,019] Trial 4397 pruned.
[I 2021-05-10 19:56:02,732] Trial 4398 pruned.
[I 2021-05-10 19:56:03,162] Trial 4399 pruned.
[I 2021-05-10 19:56:03,891] Trial 4400 pruned.
[I 2021-05-10 19:56:04,598] Trial 4401 pruned.
[I 2021-05-10 19:56:04,983] Trial 4402 pruned.
[I 2021-05-10 19:56:05,691] Trial 4403 pruned.
[I 2021-05-10 19:56:06,404] Trial 4404 pruned.
[I 2021-05-10 19:56:06,685] Trial 4405 pruned.
[I 2021-05-10 19:56:06,900] Trial 4406 pruned.
[I 2021-05-10 19:56:07,626] Trial 4407 pruned.
[I 2021-05-10 19:56:08,055] Trial 4408 pruned.
[I 2021-05-10 19:56:08,781] Trial 4409 pruned.
[I 2021-05-10 19:56:09,418] Trial 4410 pruned.
[I 2021-05-10 19:56:09,851] Trial 4411 pruned.
[I 2021-05-10 19:56:10,578] Trial 4412 pruned.
[I 2021-05-10 19:56:11,005] Trial 4413 pruned.
[I 2021-05-10 19:56:11,718] Trial 4414 pruned.
[I 2021-05-10 19:56:12,434] Trial 4415 pruned.
[I 2021-05-10 19:56:12,830] Trial 4416 pruned.
[I 2021-05-10 19:56:13,537] Trial 4417 pruned.
[I 2021-05-10 19:56:14,627] Trial 4418 pruned.
[I 2021-05-10 19:56:14,835] Trial 4419 pruned.
[I 2021-05-10 19:56:15,260] Trial 4420 pruned.
[I 2021-05-10 19:56:15,986] Trial 4421 pruned.
[I 2021-05-10 19:56:16,271] Trial 4422 pruned.
[I 2021-05-10 19:56:16,692] Trial 4423 pruned.
[I 2021-05-10 19:56:17,414] Trial 4424 pruned.
[I 2021-05-10 19:56:18,142] Trial 4425 pruned.
[I 2021-05-10 19:56:18,534] Trial 4426 pruned.
[I 2021-05-10 19:56:19,240] Trial 4427 pruned.
[I 2021-05-10 19:56:19,674] Trial 4428 pruned.
[I 2021-05-10 19:56:20,396] Trial 4429 pruned.
[I 2021-05-10 19:56:21,115] Trial 4430 pruned.
[I 2021-05-10 19:56:21,507] Trial 4431 pruned.
[I 2021-05-10 19:56:22,797] Trial 4432 pruned.
[I 2021-05-10 19:56:23,506] Trial 4433 pruned.
[I 2021-05-10 19:56:23,718] Trial 4434 pruned.
[I 2021-05-10 19:56:24,149] Trial 4435 pruned.
[I 2021-05-10 19:56:24,867] Trial 4436 pruned.
[I 2021-05-10 19:56:25,295] Trial 4437 pruned.
[I 2021-05-10 19:56:26,004] Trial 4438 pruned.
[I 2021-05-10 19:56:26,740] Trial 4439 pruned.
[I 2021-05-10 19:56:27,137] Trial 4440 pruned.
[I 2021-05-10 19:56:27,861] Trial 4441 pruned.
[I 2021-05-10 19:56:28,293] Trial 4442 pruned.
[I 2021-05-10 19:56:29,002] Trial 4443 pruned.
[I 2021-05-10 19:56:29,615] Trial 4444 pruned.
[I 2021-05-10 19:56:30,049] Trial 4445 pruned.
[I 2021-05-10 19:56:30,763] Trial 4446 pruned.
[I 2021-05-10 19:56:31,469] Trial 4447 pruned.
[I 2021-05-10 19:56:31,685] Trial 4448 pruned.
[I 2021-05-10 19:56:32,117] Trial 4449 pruned.
[I 2021-05-10 19:56:32,841] Trial 4450 pruned.
[I 2021-05-10 19:56:33,272] Trial 4451 pruned.
[I 2021-05-10 19:56:33,985] Trial 4452 pruned.
[I 2021-05-10 19:56:34,635] Trial 4453 pruned.
[I 2021-05-10 19:56:35,065] Trial 4454 pruned.
[I 2021-05-10 19:56:35,782] Trial 4455 pruned.
[I 2021-05-10 19:56:36,209] Trial 4456 pruned.
[I 2021-05-10 19:56:36,493] Trial 4457 pruned.
[I 2021-05-10 19:56:37,203] Trial 4458 pruned.
[I 2021-05-10 19:56:38,511] Trial 4459 pruned.
[I 2021-05-10 19:56:38,941] Trial 4460 pruned.
[I 2021-05-10 19:56:39,655] Trial 4461 pruned.
[I 2021-05-10 19:56:40,377] Trial 4462 pruned.
[I 2021-05-10 19:56:40,767] Trial 4463 pruned.
[I 2021-05-10 19:56:40,985] Trial 4464 pruned.
[I 2021-05-10 19:56:41,640] Trial 4465 pruned.
[I 2021-05-10 19:56:42,069] Trial 4466 pruned.
[I 2021-05-10 19:56:42,781] Trial 4467 pruned.
[I 2021-05-10 19:56:43,377] Trial 4468 pruned.
[I 2021-05-10 19:56:43,806] Trial 4469 pruned.
[I 2021-05-10 19:56:44,527] Trial 4470 pruned.
[I 2021-05-10 19:56:44,957] Trial 4471 pruned.
[I 2021-05-10 19:56:45,670] Trial 4472 pruned.
[I 2021-05-10 19:56:46,375] Trial 4473 pruned.
[I 2021-05-10 19:56:46,812] Trial 4474 pruned.
[I 2021-05-10 19:56:47,538] Trial 4475 pruned.
[I 2021-05-10 19:56:48,182] Trial 4476 pruned.
[I 2021-05-10 19:56:48,404] Trial 4477 pruned.
[I 2021-05-10 19:56:48,836] Trial 4478 pruned.
[I 2021-05-10 19:56:49,556] Trial 4479 pruned.
[I 2021-05-10 19:56:49,837] Trial 4480 pruned.
[I 2021-05-10 19:56:50,265] Trial 4481 pruned.
[I 2021-05-10 19:57:49,499] Trial 4482 finished with value: 162.79795837402344 and parameters: {'lr': 0.0024604425623038807, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 1006, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 19:57:50,212] Trial 4483 pruned.
[I 2021-05-10 19:57:50,598] Trial 4484 pruned.
[I 2021-05-10 19:57:51,319] Trial 4485 pruned.
[I 2021-05-10 19:57:51,748] Trial 4486 pruned.
[I 2021-05-10 19:57:52,389] Trial 4487 pruned.
[I 2021-05-10 19:57:53,110] Trial 4488 pruned.
[I 2021-05-10 19:57:53,532] Trial 4489 pruned.
[I 2021-05-10 19:57:54,277] Trial 4490 pruned.
[I 2021-05-10 19:57:54,994] Trial 4491 pruned.
[I 2021-05-10 19:57:55,213] Trial 4492 pruned.
[I 2021-05-10 19:57:55,641] Trial 4493 pruned.
[I 2021-05-10 19:57:56,294] Trial 4494 pruned.
[I 2021-05-10 19:57:56,724] Trial 4495 pruned.
[I 2021-05-10 19:57:58,016] Trial 4496 pruned.
[I 2021-05-10 19:57:58,740] Trial 4497 pruned.
[I 2021-05-10 19:57:59,170] Trial 4498 pruned.
[I 2021-05-10 19:57:59,887] Trial 4499 pruned.
[I 2021-05-10 19:58:00,318] Trial 4500 pruned.
[I 2021-05-10 19:58:01,026] Trial 4501 pruned.
[I 2021-05-10 19:58:01,748] Trial 4502 pruned.
[I 2021-05-10 19:58:02,132] Trial 4503 pruned.
[I 2021-05-10 19:58:02,844] Trial 4504 pruned.
[I 2021-05-10 19:58:03,565] Trial 4505 pruned.
[I 2021-05-10 19:58:03,989] Trial 4506 pruned.
[I 2021-05-10 19:58:04,711] Trial 4507 pruned.
[I 2021-05-10 19:58:04,927] Trial 4508 pruned.
[I 2021-05-10 19:58:05,211] Trial 4509 pruned.
[I 2021-05-10 19:58:05,645] Trial 4510 pruned.
[I 2021-05-10 19:58:06,360] Trial 4511 pruned.
[I 2021-05-10 19:58:07,086] Trial 4512 pruned.
[I 2021-05-10 19:58:07,488] Trial 4513 pruned.
[I 2021-05-10 19:58:08,213] Trial 4514 pruned.
[I 2021-05-10 19:58:08,645] Trial 4515 pruned.
[I 2021-05-10 19:58:09,360] Trial 4516 pruned.
[I 2021-05-10 19:58:09,993] Trial 4517 pruned.
[I 2021-05-10 19:58:10,428] Trial 4518 pruned.
[I 2021-05-10 19:58:12,309] Trial 4519 pruned.
[I 2021-05-10 19:58:12,924] Trial 4520 pruned.
[I 2021-05-10 19:58:13,142] Trial 4521 pruned.
[I 2021-05-10 19:58:13,574] Trial 4522 pruned.
[I 2021-05-10 19:58:14,287] Trial 4523 pruned.
[I 2021-05-10 19:58:14,714] Trial 4524 pruned.
[I 2021-05-10 19:58:15,429] Trial 4525 pruned.
[I 2021-05-10 19:58:16,571] Trial 4526 pruned.
[I 2021-05-10 19:58:17,007] Trial 4527 pruned.
[I 2021-05-10 19:58:17,724] Trial 4528 pruned.
[I 2021-05-10 19:58:18,151] Trial 4529 pruned.
[I 2021-05-10 19:58:18,878] Trial 4530 pruned.
[I 2021-05-10 19:58:19,598] Trial 4531 pruned.
[I 2021-05-10 19:58:20,030] Trial 4532 pruned.
[I 2021-05-10 19:58:20,694] Trial 4533 pruned.
[I 2021-05-10 19:58:21,399] Trial 4534 pruned.
[I 2021-05-10 19:58:21,619] Trial 4535 pruned.
[I 2021-05-10 19:58:22,053] Trial 4536 pruned.
[I 2021-05-10 19:58:22,769] Trial 4537 pruned.
[I 2021-05-10 19:58:23,057] Trial 4538 pruned.
[I 2021-05-10 19:58:23,485] Trial 4539 pruned.
[I 2021-05-10 19:58:24,209] Trial 4540 pruned.
[I 2021-05-10 19:58:24,878] Trial 4541 pruned.
[I 2021-05-10 19:58:25,312] Trial 4542 pruned.
[I 2021-05-10 19:58:26,029] Trial 4543 pruned.
[I 2021-05-10 19:58:26,341] Trial 4544 pruned.
[I 2021-05-10 19:58:27,086] Trial 4545 pruned.
[I 2021-05-10 19:58:27,804] Trial 4546 pruned.
[I 2021-05-10 19:58:28,236] Trial 4547 pruned.
[I 2021-05-10 19:58:28,967] Trial 4548 pruned.
[I 2021-05-10 19:58:29,601] Trial 4549 pruned.
[I 2021-05-10 19:58:29,816] Trial 4550 pruned.
[I 2021-05-10 19:58:30,179] Trial 4551 pruned.
[I 2021-05-10 19:58:30,904] Trial 4552 pruned.
[I 2021-05-10 19:58:31,331] Trial 4553 pruned.
[I 2021-05-10 19:58:32,055] Trial 4554 pruned.
[I 2021-05-10 19:58:32,784] Trial 4555 pruned.
[I 2021-05-10 19:58:33,170] Trial 4556 pruned.
[I 2021-05-10 19:58:33,898] Trial 4557 pruned.
[I 2021-05-10 19:58:34,333] Trial 4558 pruned.
[I 2021-05-10 19:58:35,041] Trial 4559 pruned.
[I 2021-05-10 19:58:35,763] Trial 4560 pruned.
[I 2021-05-10 19:58:36,197] Trial 4561 pruned.
[I 2021-05-10 19:58:36,833] Trial 4562 pruned.
[I 2021-05-10 19:58:37,557] Trial 4563 pruned.
[I 2021-05-10 19:58:37,782] Trial 4564 pruned.
[I 2021-05-10 19:58:38,212] Trial 4565 pruned.
[I 2021-05-10 19:58:38,927] Trial 4566 pruned.
[I 2021-05-10 19:58:39,369] Trial 4567 pruned.
[I 2021-05-10 19:58:39,664] Trial 4568 pruned.
[I 2021-05-10 19:58:40,301] Trial 4569 pruned.
[I 2021-05-10 19:58:41,033] Trial 4570 pruned.
[I 2021-05-10 19:58:41,464] Trial 4571 pruned.
[I 2021-05-10 19:58:42,188] Trial 4572 pruned.
[I 2021-05-10 19:58:42,621] Trial 4573 pruned.
[I 2021-05-10 19:58:43,330] Trial 4574 pruned.
[I 2021-05-10 19:58:44,040] Trial 4575 pruned.
[I 2021-05-10 19:58:44,479] Trial 4576 pruned.
[I 2021-05-10 19:58:45,182] Trial 4577 pruned.
[I 2021-05-10 19:58:45,909] Trial 4578 pruned.
[I 2021-05-10 19:58:46,125] Trial 4579 pruned.
[I 2021-05-10 19:58:46,515] Trial 4580 pruned.
[I 2021-05-10 19:58:47,239] Trial 4581 pruned.
[I 2021-05-10 19:58:47,674] Trial 4582 pruned.
[I 2021-05-10 19:58:48,392] Trial 4583 pruned.
[I 2021-05-10 19:58:49,102] Trial 4584 pruned.
[I 2021-05-10 19:58:49,499] Trial 4585 pruned.
[I 2021-05-10 19:58:50,218] Trial 4586 pruned.
[I 2021-05-10 19:58:50,659] Trial 4587 pruned.
[I 2021-05-10 19:58:51,382] Trial 4588 pruned.
[I 2021-05-10 19:58:52,105] Trial 4589 pruned.
[I 2021-05-10 19:58:52,568] Trial 4590 pruned.
[I 2021-05-10 19:58:53,297] Trial 4591 pruned.
[I 2021-05-10 19:58:54,021] Trial 4592 pruned.
[I 2021-05-10 19:58:54,251] Trial 4593 pruned.
[I 2021-05-10 19:58:54,633] Trial 4594 pruned.
[I 2021-05-10 19:58:55,367] Trial 4595 pruned.
[I 2021-05-10 19:58:55,655] Trial 4596 pruned.
[I 2021-05-10 19:58:56,092] Trial 4597 pruned.
[I 2021-05-10 19:58:56,817] Trial 4598 pruned.
[I 2021-05-10 19:58:57,546] Trial 4599 pruned.
[I 2021-05-10 19:58:57,983] Trial 4600 pruned.
[I 2021-05-10 19:58:58,698] Trial 4601 pruned.
[I 2021-05-10 19:58:59,104] Trial 4602 pruned.
[I 2021-05-10 19:58:59,848] Trial 4603 pruned.
[I 2021-05-10 19:59:00,584] Trial 4604 pruned.
[I 2021-05-10 19:59:01,031] Trial 4605 pruned.
[I 2021-05-10 19:59:01,638] Trial 4606 pruned.
[I 2021-05-10 19:59:02,363] Trial 4607 pruned.
[I 2021-05-10 19:59:02,795] Trial 4608 pruned.
[I 2021-05-10 19:59:03,020] Trial 4609 pruned.
[I 2021-05-10 19:59:05,854] Trial 4610 pruned.
[I 2021-05-10 19:59:06,289] Trial 4611 pruned.
[I 2021-05-10 19:59:07,008] Trial 4612 pruned.
[I 2021-05-10 19:59:07,726] Trial 4613 pruned.
[I 2021-05-10 19:59:08,181] Trial 4614 pruned.
[I 2021-05-10 19:59:08,820] Trial 4615 pruned.
[I 2021-05-10 19:59:09,260] Trial 4616 pruned.
[I 2021-05-10 19:59:09,984] Trial 4617 pruned.
[I 2021-05-10 19:59:10,726] Trial 4618 pruned.
[I 2021-05-10 19:59:11,155] Trial 4619 pruned.
[I 2021-05-10 19:59:11,874] Trial 4620 pruned.
[I 2021-05-10 19:59:12,593] Trial 4621 pruned.
[I 2021-05-10 19:59:12,993] Trial 4622 pruned.
[I 2021-05-10 19:59:13,215] Trial 4623 pruned.
[I 2021-05-10 19:59:13,935] Trial 4624 pruned.
[I 2021-05-10 19:59:14,231] Trial 4625 pruned.
[I 2021-05-10 19:59:14,663] Trial 4626 pruned.
[I 2021-05-10 19:59:15,386] Trial 4627 pruned.
[I 2021-05-10 19:59:16,109] Trial 4628 pruned.
[I 2021-05-10 19:59:16,543] Trial 4629 pruned.
[I 2021-05-10 19:59:17,270] Trial 4630 pruned.
[I 2021-05-10 19:59:17,650] Trial 4631 pruned.
[I 2021-05-10 19:59:18,394] Trial 4632 pruned.
[I 2021-05-10 19:59:19,057] Trial 4633 pruned.
[I 2021-05-10 19:59:19,507] Trial 4634 pruned.
[I 2021-05-10 19:59:20,226] Trial 4635 pruned.
[I 2021-05-10 19:59:20,945] Trial 4636 pruned.
[I 2021-05-10 19:59:21,394] Trial 4637 pruned.
[I 2021-05-10 19:59:21,608] Trial 4638 pruned.
[I 2021-05-10 19:59:22,320] Trial 4639 pruned.
[I 2021-05-10 19:59:22,751] Trial 4640 pruned.
[I 2021-05-10 19:59:23,486] Trial 4641 pruned.
[I 2021-05-10 19:59:24,035] Trial 4642 pruned.
[I 2021-05-10 19:59:24,474] Trial 4643 pruned.
[I 2021-05-10 19:59:25,207] Trial 4644 pruned.
[I 2021-05-10 19:59:25,646] Trial 4645 pruned.
[I 2021-05-10 19:59:26,267] Trial 4646 pruned.
[I 2021-05-10 19:59:26,997] Trial 4647 pruned.
[I 2021-05-10 19:59:27,430] Trial 4648 pruned.
[I 2021-05-10 19:59:28,168] Trial 4649 pruned.
[I 2021-05-10 19:59:28,897] Trial 4650 pruned.
[I 2021-05-10 19:59:29,283] Trial 4651 pruned.
[I 2021-05-10 19:59:29,504] Trial 4652 pruned.
[I 2021-05-10 19:59:30,240] Trial 4653 pruned.
[I 2021-05-10 19:59:30,534] Trial 4654 pruned.
[I 2021-05-10 19:59:30,972] Trial 4655 pruned.
[I 2021-05-10 19:59:31,686] Trial 4656 pruned.
[I 2021-05-10 19:59:32,400] Trial 4657 pruned.
[I 2021-05-10 19:59:32,837] Trial 4658 pruned.
[I 2021-05-10 19:59:33,567] Trial 4659 pruned.
[I 2021-05-10 19:59:33,954] Trial 4660 pruned.
[I 2021-05-10 19:59:34,681] Trial 4661 pruned.
[I 2021-05-10 19:59:35,402] Trial 4662 pruned.
[I 2021-05-10 19:59:35,837] Trial 4663 pruned.
[I 2021-05-10 19:59:36,574] Trial 4664 pruned.
[I 2021-05-10 19:59:37,228] Trial 4665 pruned.
[I 2021-05-10 19:59:37,663] Trial 4666 pruned.
[I 2021-05-10 19:59:37,887] Trial 4667 pruned.
[I 2021-05-10 19:59:38,620] Trial 4668 pruned.
[I 2021-05-10 19:59:39,060] Trial 4669 pruned.
[I 2021-05-10 19:59:39,769] Trial 4670 pruned.
[I 2021-05-10 19:59:40,496] Trial 4671 pruned.
[I 2021-05-10 19:59:40,932] Trial 4672 pruned.
[I 2021-05-10 19:59:41,654] Trial 4673 pruned.
[I 2021-05-10 19:59:42,041] Trial 4674 pruned.
[I 2021-05-10 19:59:42,770] Trial 4675 pruned.
[I 2021-05-10 19:59:43,506] Trial 4676 pruned.
[I 2021-05-10 19:59:43,938] Trial 4677 pruned.
[I 2021-05-10 19:59:44,663] Trial 4678 pruned.
[I 2021-05-10 20:00:43,184] Trial 4679 finished with value: 195.83937072753906 and parameters: {'lr': 0.0013810129184697731, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 970, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 20:00:43,626] Trial 4680 pruned.
[I 2021-05-10 20:00:43,834] Trial 4681 pruned.
[I 2021-05-10 20:00:44,556] Trial 4682 pruned.
[I 2021-05-10 20:00:44,798] Trial 4683 pruned.
[I 2021-05-10 20:00:45,227] Trial 4684 pruned.
[I 2021-05-10 20:00:45,954] Trial 4685 pruned.
[I 2021-05-10 20:00:46,680] Trial 4686 pruned.
[I 2021-05-10 20:00:47,110] Trial 4687 pruned.
[I 2021-05-10 20:00:47,772] Trial 4688 pruned.
[I 2021-05-10 20:00:48,217] Trial 4689 pruned.
[I 2021-05-10 20:00:48,948] Trial 4690 pruned.
[I 2021-05-10 20:00:49,663] Trial 4691 pruned.
[I 2021-05-10 20:00:50,097] Trial 4692 pruned.
[I 2021-05-10 20:00:50,827] Trial 4693 pruned.
[I 2021-05-10 20:00:51,556] Trial 4694 pruned.
[I 2021-05-10 20:00:51,993] Trial 4695 pruned.
[I 2021-05-10 20:00:52,221] Trial 4696 pruned.
[I 2021-05-10 20:00:52,915] Trial 4697 pruned.
[I 2021-05-10 20:00:53,362] Trial 4698 pruned.
[I 2021-05-10 20:00:54,104] Trial 4699 pruned.
[I 2021-05-10 20:00:54,838] Trial 4700 pruned.
[I 2021-05-10 20:00:55,567] Trial 4701 pruned.
[I 2021-05-10 20:00:56,278] Trial 4702 pruned.
[I 2021-05-10 20:00:56,682] Trial 4703 pruned.
[I 2021-05-10 20:00:57,410] Trial 4704 pruned.
[I 2021-05-10 20:00:58,128] Trial 4705 pruned.
[I 2021-05-10 20:00:58,565] Trial 4706 pruned.
[I 2021-05-10 20:00:59,226] Trial 4707 pruned.
[I 2021-05-10 20:00:59,972] Trial 4708 pruned.
[I 2021-05-10 20:01:00,406] Trial 4709 pruned.
[I 2021-05-10 20:01:00,657] Trial 4710 pruned.
[I 2021-05-10 20:01:01,417] Trial 4711 pruned.
[I 2021-05-10 20:01:01,854] Trial 4712 pruned.
[I 2021-05-10 20:01:02,563] Trial 4713 pruned.
[I 2021-05-10 20:01:03,291] Trial 4714 pruned.
[I 2021-05-10 20:01:03,730] Trial 4715 pruned.
[I 2021-05-10 20:01:05,047] Trial 4716 pruned.
[I 2021-05-10 20:01:05,487] Trial 4717 pruned.
[I 2021-05-10 20:01:05,752] Trial 4718 pruned.
[I 2021-05-10 20:01:06,473] Trial 4719 pruned.
[I 2021-05-10 20:01:07,196] Trial 4720 pruned.
[I 2021-05-10 20:01:07,508] Trial 4721 pruned.
[I 2021-05-10 20:01:08,234] Trial 4722 pruned.
[I 2021-05-10 20:01:09,639] Trial 4723 pruned.
[I 2021-05-10 20:01:10,078] Trial 4724 pruned.
[I 2021-05-10 20:01:10,299] Trial 4725 pruned.
[I 2021-05-10 20:01:10,952] Trial 4726 pruned.
[I 2021-05-10 20:01:11,397] Trial 4727 pruned.
[I 2021-05-10 20:01:12,130] Trial 4728 pruned.
[I 2021-05-10 20:01:12,837] Trial 4729 pruned.
[I 2021-05-10 20:01:13,269] Trial 4730 pruned.
[I 2021-05-10 20:01:14,008] Trial 4731 pruned.
[I 2021-05-10 20:01:14,346] Trial 4732 pruned.
[I 2021-05-10 20:01:15,058] Trial 4733 pruned.
[I 2021-05-10 20:01:15,722] Trial 4734 pruned.
[I 2021-05-10 20:01:16,168] Trial 4735 pruned.
[I 2021-05-10 20:01:16,900] Trial 4736 pruned.
[I 2021-05-10 20:01:17,618] Trial 4737 pruned.
[I 2021-05-10 20:01:18,052] Trial 4738 pruned.
[I 2021-05-10 20:01:18,285] Trial 4739 pruned.
[I 2021-05-10 20:01:19,007] Trial 4740 pruned.
[I 2021-05-10 20:01:19,278] Trial 4741 pruned.
[I 2021-05-10 20:01:19,736] Trial 4742 pruned.
[I 2021-05-10 20:01:20,462] Trial 4743 pruned.
[I 2021-05-10 20:01:21,187] Trial 4744 pruned.
[I 2021-05-10 20:01:21,615] Trial 4745 pruned.
[I 2021-05-10 20:01:22,357] Trial 4746 pruned.
[I 2021-05-10 20:01:22,746] Trial 4747 pruned.
[I 2021-05-10 20:01:23,475] Trial 4748 pruned.
[I 2021-05-10 20:01:24,197] Trial 4749 pruned.
[I 2021-05-10 20:01:24,641] Trial 4750 pruned.
[I 2021-05-10 20:01:25,359] Trial 4751 pruned.
[I 2021-05-10 20:01:26,081] Trial 4752 pruned.
[I 2021-05-10 20:01:26,519] Trial 4753 pruned.
[I 2021-05-10 20:01:26,751] Trial 4754 pruned.
[I 2021-05-10 20:01:27,468] Trial 4755 pruned.
[I 2021-05-10 20:01:27,857] Trial 4756 pruned.
[I 2021-05-10 20:01:28,595] Trial 4757 pruned.
[I 2021-05-10 20:01:29,330] Trial 4758 pruned.
[I 2021-05-10 20:01:29,760] Trial 4759 pruned.
[I 2021-05-10 20:01:30,479] Trial 4760 pruned.
[I 2021-05-10 20:01:30,919] Trial 4761 pruned.
[I 2021-05-10 20:01:31,583] Trial 4762 pruned.
[I 2021-05-10 20:01:32,895] Trial 4763 pruned.
[I 2021-05-10 20:01:33,332] Trial 4764 pruned.
[I 2021-05-10 20:01:34,067] Trial 4765 pruned.
[I 2021-05-10 20:01:34,806] Trial 4766 pruned.
[I 2021-05-10 20:01:35,255] Trial 4767 pruned.
[I 2021-05-10 20:01:35,476] Trial 4768 pruned.
[I 2021-05-10 20:01:36,193] Trial 4769 pruned.
[I 2021-05-10 20:01:36,471] Trial 4770 pruned.
[I 2021-05-10 20:01:36,914] Trial 4771 pruned.
[I 2021-05-10 20:01:38,199] Trial 4772 pruned.
[I 2021-05-10 20:01:38,938] Trial 4773 pruned.
[I 2021-05-10 20:01:39,680] Trial 4774 pruned.
[I 2021-05-10 20:01:40,401] Trial 4775 pruned.
[I 2021-05-10 20:01:40,786] Trial 4776 pruned.
[I 2021-05-10 20:01:41,395] Trial 4777 pruned.
[I 2021-05-10 20:01:42,129] Trial 4778 pruned.
[I 2021-05-10 20:01:42,561] Trial 4779 pruned.
[I 2021-05-10 20:01:43,295] Trial 4780 pruned.
[I 2021-05-10 20:01:44,025] Trial 4781 pruned.
[I 2021-05-10 20:01:44,460] Trial 4782 pruned.
[I 2021-05-10 20:01:44,662] Trial 4783 pruned.
[I 2021-05-10 20:01:45,389] Trial 4784 pruned.
[I 2021-05-10 20:01:45,832] Trial 4785 pruned.
[I 2021-05-10 20:01:46,560] Trial 4786 pruned.
[I 2021-05-10 20:01:47,871] Trial 4787 pruned.
[I 2021-05-10 20:01:48,316] Trial 4788 pruned.
[I 2021-05-10 20:01:49,051] Trial 4789 pruned.
[I 2021-05-10 20:01:49,488] Trial 4790 pruned.
[I 2021-05-10 20:01:50,203] Trial 4791 pruned.
[I 2021-05-10 20:01:50,927] Trial 4792 pruned.
[I 2021-05-10 20:01:51,329] Trial 4793 pruned.
[I 2021-05-10 20:01:52,045] Trial 4794 pruned.
[I 2021-05-10 20:01:52,772] Trial 4795 pruned.
[I 2021-05-10 20:01:53,215] Trial 4796 pruned.
[I 2021-05-10 20:01:53,440] Trial 4797 pruned.
[I 2021-05-10 20:01:54,174] Trial 4798 pruned.
[I 2021-05-10 20:01:54,610] Trial 4799 pruned.
[I 2021-05-10 20:01:55,278] Trial 4800 pruned.
[I 2021-05-10 20:01:56,001] Trial 4801 pruned.
[I 2021-05-10 20:01:56,305] Trial 4802 pruned.
[I 2021-05-10 20:01:56,743] Trial 4803 pruned.
[I 2021-05-10 20:01:57,464] Trial 4804 pruned.
[I 2021-05-10 20:01:57,894] Trial 4805 pruned.
[I 2021-05-10 20:01:58,559] Trial 4806 pruned.
[I 2021-05-10 20:01:59,294] Trial 4807 pruned.
[I 2021-05-10 20:01:59,729] Trial 4808 pruned.
[I 2021-05-10 20:02:00,455] Trial 4809 pruned.
[I 2021-05-10 20:02:01,177] Trial 4810 pruned.
[I 2021-05-10 20:02:01,625] Trial 4811 pruned.
[I 2021-05-10 20:02:01,855] Trial 4812 pruned.
[I 2021-05-10 20:02:02,572] Trial 4813 pruned.
[I 2021-05-10 20:02:02,965] Trial 4814 pruned.
[I 2021-05-10 20:02:03,679] Trial 4815 pruned.
[I 2021-05-10 20:02:04,411] Trial 4816 pruned.
[I 2021-05-10 20:02:04,844] Trial 4817 pruned.
[I 2021-05-10 20:02:05,577] Trial 4818 pruned.
[I 2021-05-10 20:02:05,982] Trial 4819 pruned.
[I 2021-05-10 20:02:06,734] Trial 4820 pruned.
[I 2021-05-10 20:02:07,461] Trial 4821 pruned.
[I 2021-05-10 20:02:07,890] Trial 4822 pruned.
[I 2021-05-10 20:02:08,644] Trial 4823 pruned.
[I 2021-05-10 20:02:09,259] Trial 4824 pruned.
[I 2021-05-10 20:02:09,691] Trial 4825 pruned.
[I 2021-05-10 20:02:09,920] Trial 4826 pruned.
[I 2021-05-10 20:02:10,659] Trial 4827 pruned.
[I 2021-05-10 20:02:11,046] Trial 4828 pruned.
[I 2021-05-10 20:02:11,790] Trial 4829 pruned.
[I 2021-05-10 20:02:12,525] Trial 4830 pruned.
[I 2021-05-10 20:02:12,826] Trial 4831 pruned.
[I 2021-05-10 20:02:13,227] Trial 4832 pruned.
[I 2021-05-10 20:02:13,949] Trial 4833 pruned.
[I 2021-05-10 20:02:14,662] Trial 4834 pruned.
[I 2021-05-10 20:02:15,100] Trial 4835 pruned.
[I 2021-05-10 20:02:15,831] Trial 4836 pruned.
[I 2021-05-10 20:02:16,264] Trial 4837 pruned.
[I 2021-05-10 20:02:16,981] Trial 4838 pruned.
[I 2021-05-10 20:03:17,315] Trial 4839 finished with value: 168.90460205078125 and parameters: {'lr': 0.002764520190083518, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 992, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'relu'}. Best is trial 1927 with value: 158.8334197998047.
[I 2021-05-10 20:03:17,756] Trial 4840 pruned.
[I 2021-05-10 20:03:17,960] Trial 4841 pruned.
[I 2021-05-10 20:03:18,691] Trial 4842 pruned.
[I 2021-05-10 20:03:19,134] Trial 4843 pruned.
[I 2021-05-10 20:03:19,866] Trial 4844 pruned.
[I 2021-05-10 20:03:20,583] Trial 4845 pruned.
[I 2021-05-10 20:03:21,025] Trial 4846 pruned.
[I 2021-05-10 20:03:21,778] Trial 4847 pruned.
[I 2021-05-10 20:03:22,432] Trial 4848 pruned.
[I 2021-05-10 20:03:22,876] Trial 4849 pruned.
[I 2021-05-10 20:03:24,166] Trial 4850 pruned.
[I 2021-05-10 20:03:24,610] Trial 4851 pruned.
[I 2021-05-10 20:03:26,509] Trial 4852 pruned.
[I 2021-05-10 20:03:27,219] Trial 4853 pruned.
[I 2021-05-10 20:03:27,624] Trial 4854 pruned.
[I 2021-05-10 20:03:27,857] Trial 4855 pruned.
[I 2021-05-10 20:03:28,378] Trial 4856 pruned.
[I 2021-05-10 20:03:28,820] Trial 4857 pruned.
[I 2021-05-10 20:03:29,119] Trial 4858 pruned.
[I 2021-05-10 20:03:29,846] Trial 4859 pruned.
[I 2021-05-10 20:03:31,156] Trial 4860 pruned.
[I 2021-05-10 20:03:31,625] Trial 4861 pruned.
[I 2021-05-10 20:03:32,346] Trial 4862 pruned.
[I 2021-05-10 20:03:33,071] Trial 4863 pruned.
[I 2021-05-10 20:03:33,513] Trial 4864 pruned.
[I 2021-05-10 20:03:34,246] Trial 4865 pruned.
[I 2021-05-10 20:03:34,615] Trial 4866 pruned.
[I 2021-05-10 20:03:35,283] Trial 4867 pruned.
[I 2021-05-10 20:03:36,016] Trial 4868 pruned.
[I 2021-05-10 20:03:36,460] Trial 4869 pruned.
[I 2021-05-10 20:03:36,686] Trial 4870 pruned.
[I 2021-05-10 20:03:37,422] Trial 4871 pruned.
[I 2021-05-10 20:03:37,864] Trial 4872 pruned.
[I 2021-05-10 20:03:38,529] Trial 4873 pruned.
[I 2021-05-10 20:03:39,261] Trial 4874 pruned.
[I 2021-05-10 20:03:39,703] Trial 4875 pruned.
[I 2021-05-10 20:03:40,430] Trial 4876 pruned.
[I 2021-05-10 20:03:41,755] Trial 4877 pruned.
[I 2021-05-10 20:03:42,196] Trial 4878 pruned.
[I 2021-05-10 20:03:42,906] Trial 4879 pruned.
[I 2021-05-10 20:03:43,303] Trial 4880 pruned.
[I 2021-05-10 20:03:44,021] Trial 4881 pruned.
[I 2021-05-10 20:03:44,768] Trial 4882 pruned.
[I 2021-05-10 20:03:45,203] Trial 4883 pruned.
[I 2021-05-10 20:03:45,427] Trial 4884 pruned.
[I 2021-05-10 20:03:46,174] Trial 4885 pruned.
[I 2021-05-10 20:03:46,569] Trial 4886 pruned.
[I 2021-05-10 20:03:47,860] Trial 4887 pruned.
[I 2021-05-10 20:03:48,165] Trial 4888 pruned.
[I 2021-05-10 20:03:48,886] Trial 4889 pruned.
[I 2021-05-10 20:03:49,327] Trial 4890 pruned.
[I 2021-05-10 20:03:50,061] Trial 4891 pruned.
[I 2021-05-10 20:03:50,797] Trial 4892 pruned.
[I 2021-05-10 20:03:51,240] Trial 4893 pruned.
[I 2021-05-10 20:03:51,979] Trial 4894 pruned.
[I 2021-05-10 20:03:52,390] Trial 4895 pruned.
[I 2021-05-10 20:03:53,117] Trial 4896 pruned.
[I 2021-05-10 20:03:53,823] Trial 4897 pruned.
[I 2021-05-10 20:03:54,273] Trial 4898 pruned.
[I 2021-05-10 20:03:54,504] Trial 4899 pruned.
[I 2021-05-10 20:03:55,230] Trial 4900 pruned.
[I 2021-05-10 20:03:55,621] Trial 4901 pruned.
[I 2021-05-10 20:03:56,918] Trial 4902 pruned.
[I 2021-05-10 20:03:57,656] Trial 4903 pruned.
[I 2021-05-10 20:03:58,097] Trial 4904 pruned.
[I 2021-05-10 20:03:58,840] Trial 4905 pruned.
[I 2021-05-10 20:03:59,564] Trial 4906 pruned.
[I 2021-05-10 20:04:00,014] Trial 4907 pruned.
[I 2021-05-10 20:04:00,686] Trial 4908 pruned.
[I 2021-05-10 20:04:01,132] Trial 4909 pruned.
[I 2021-05-10 20:04:01,867] Trial 4910 pruned.
[I 2021-05-10 20:04:02,582] Trial 4911 pruned.
[I 2021-05-10 20:04:03,025] Trial 4912 pruned.
[I 2021-05-10 20:04:03,767] Trial 4913 pruned.
[I 2021-05-10 20:04:03,995] Trial 4914 pruned.
[I 2021-05-10 20:04:04,429] Trial 4915 pruned.
[I 2021-05-10 20:04:04,988] Trial 4916 pruned.
[I 2021-05-10 20:04:05,265] Trial 4917 pruned.
[I 2021-05-10 20:04:05,981] Trial 4918 pruned.
[I 2021-05-10 20:04:06,425] Trial 4919 pruned.
[I 2021-05-10 20:04:07,154] Trial 4920 pruned.
[I 2021-05-10 20:04:08,468] Trial 4921 pruned.
[I 2021-05-10 20:04:08,919] Trial 4922 pruned.
[I 2021-05-10 20:04:09,589] Trial 4923 pruned.
[I 2021-05-10 20:04:10,032] Trial 4924 pruned.
[I 2021-05-10 20:04:10,774] Trial 4925 pruned.
[I 2021-05-10 20:04:12,672] Trial 4926 pruned.
[I 2021-05-10 20:04:13,118] Trial 4927 pruned.
[I 2021-05-10 20:04:13,780] Trial 4928 pruned.
[I 2021-05-10 20:04:14,010] Trial 4929 pruned.
[I 2021-05-10 20:04:14,452] Trial 4930 pruned.
[I 2021-05-10 20:04:15,191] Trial 4931 pruned.
[I 2021-05-10 20:04:15,924] Trial 4932 pruned.
[I 2021-05-10 20:04:16,368] Trial 4933 pruned.
[I 2021-05-10 20:04:17,091] Trial 4934 pruned.
[I 2021-05-10 20:04:17,826] Trial 4935 pruned.
[I 2021-05-10 20:04:18,271] Trial 4936 pruned.
[I 2021-05-10 20:04:19,003] Trial 4937 pruned.
[I 2021-05-10 20:04:19,393] Trial 4938 pruned.
[I 2021-05-10 20:04:20,131] Trial 4939 pruned.
[I 2021-05-10 20:04:20,878] Trial 4940 pruned.
[I 2021-05-10 20:04:21,322] Trial 4941 pruned.
[I 2021-05-10 20:04:21,554] Trial 4942 pruned.
[I 2021-05-10 20:04:22,199] Trial 4943 pruned.
[I 2021-05-10 20:04:22,634] Trial 4944 pruned.
[I 2021-05-10 20:04:23,362] Trial 4945 pruned.
[I 2021-05-10 20:04:23,668] Trial 4946 pruned.
[I 2021-05-10 20:04:24,399] Trial 4947 pruned.
[I 2021-05-10 20:04:25,145] Trial 4948 pruned.
[I 2021-05-10 20:04:25,888] Trial 4949 pruned.
[I 2021-05-10 20:04:26,617] Trial 4950 pruned.
[I 2021-05-10 20:04:27,063] Trial 4951 pruned.
[I 2021-05-10 20:04:27,797] Trial 4952 pruned.
[I 2021-05-10 20:04:28,204] Trial 4953 pruned.
[I 2021-05-10 20:04:28,936] Trial 4954 pruned.
[I 2021-05-10 20:04:29,605] Trial 4955 pruned.
[I 2021-05-10 20:04:30,051] Trial 4956 pruned.
[I 2021-05-10 20:04:30,280] Trial 4957 pruned.
[I 2021-05-10 20:04:31,010] Trial 4958 pruned.
[I 2021-05-10 20:04:31,462] Trial 4959 pruned.
[I 2021-05-10 20:04:32,178] Trial 4960 pruned.
[I 2021-05-10 20:04:32,738] Trial 4961 pruned.
[I 2021-05-10 20:04:33,188] Trial 4962 pruned.
[I 2021-05-10 20:04:33,931] Trial 4963 pruned.
[I 2021-05-10 20:04:34,652] Trial 4964 pruned.
[I 2021-05-10 20:04:35,094] Trial 4965 pruned.
[I 2021-05-10 20:04:35,820] Trial 4966 pruned.
[I 2021-05-10 20:04:36,274] Trial 4967 pruned.
[I 2021-05-10 20:04:36,935] Trial 4968 pruned.
[I 2021-05-10 20:04:37,666] Trial 4969 pruned.
[I 2021-05-10 20:04:38,113] Trial 4970 pruned.
[I 2021-05-10 20:04:38,340] Trial 4971 pruned.
[I 2021-05-10 20:04:46,690] Trial 4972 pruned.
[I 2021-05-10 20:04:47,131] Trial 4973 pruned.
[I 2021-05-10 20:04:47,854] Trial 4974 pruned.
[I 2021-05-10 20:04:48,519] Trial 4975 pruned.
[I 2021-05-10 20:04:48,821] Trial 4976 pruned.
[I 2021-05-10 20:04:49,273] Trial 4977 pruned.
[I 2021-05-10 20:04:50,006] Trial 4978 pruned.
[I 2021-05-10 20:04:50,735] Trial 4979 pruned.
[I 2021-05-10 20:04:51,170] Trial 4980 pruned.
[I 2021-05-10 20:04:52,365] Trial 4981 pruned.
[I 2021-05-10 20:04:53,115] Trial 4982 pruned.
[I 2021-05-10 20:04:53,865] Trial 4983 pruned.
[I 2021-05-10 20:04:54,599] Trial 4984 pruned.
[I 2021-05-10 20:04:54,996] Trial 4985 pruned.
[I 2021-05-10 20:04:55,235] Trial 4986 pruned.
[I 2021-05-10 20:04:55,954] Trial 4987 pruned.
[I 2021-05-10 20:04:56,402] Trial 4988 pruned.
[I 2021-05-10 20:04:57,125] Trial 4989 pruned.
[I 2021-05-10 20:04:57,868] Trial 4990 pruned.
[I 2021-05-10 20:04:58,309] Trial 4991 pruned.
[I 2021-05-10 20:04:59,058] Trial 4992 pruned.
[I 2021-05-10 20:04:59,809] Trial 4993 pruned.
[I 2021-05-10 20:05:00,272] Trial 4994 pruned.
[I 2021-05-10 20:05:00,958] Trial 4995 pruned.
[I 2021-05-10 20:05:01,423] Trial 4996 pruned.
[I 2021-05-10 20:05:02,158] Trial 4997 pruned.
[I 2021-05-10 20:05:02,782] Trial 4998 pruned.
[I 2021-05-10 20:05:03,231] Trial 4999 pruned.
Wall time: 2h 11min 12s
# Export every trial of the MinMax-scaled Optuna study (params, objective
# value, state, timings) as a DataFrame for offline inspection.
minmax_trials_df = scaled_study.trials_dataframe()
# Bare expression so the notebook renders the table.
minmax_trials_df
| number | value | datetime_start | datetime_complete | duration | params_HL0_ac_fn | params_HL1_ac_fn | params_HL2_ac_fn | params_HL3_ac_fn | params_HL4_ac_fn | params_batch_size | params_lr | params_n_layers | params_neurons_HL1 | state | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0 | 175.717407 | 2021-05-10 17:53:50.560322 | 2021-05-10 17:54:27.437748 | 0 days 00:00:36.877426 | relu | relu | NaN | NaN | NaN | 16 | 0.005612 | 2 | 60 | COMPLETE |
| 1 | 1 | 184.403442 | 2021-05-10 17:54:27.439743 | 2021-05-10 17:55:11.703202 | 0 days 00:00:44.263459 | relu | relu | linear | NaN | NaN | 16 | 0.087060 | 3 | 538 | COMPLETE |
| 2 | 2 | 165.814972 | 2021-05-10 17:55:11.705197 | 2021-05-10 17:55:47.612283 | 0 days 00:00:35.907086 | relu | linear | NaN | NaN | NaN | 16 | 0.008168 | 2 | 624 | COMPLETE |
| 3 | 3 | 239.189392 | 2021-05-10 17:55:47.613281 | 2021-05-10 17:55:56.524461 | 0 days 00:00:08.911180 | linear | linear | NaN | NaN | NaN | 64 | 0.041380 | 2 | 508 | COMPLETE |
| 4 | 4 | 161.500671 | 2021-05-10 17:55:56.526455 | 2021-05-10 17:56:04.966026 | 0 days 00:00:08.439571 | relu | relu | relu | linear | linear | 128 | 0.004202 | 5 | 964 | COMPLETE |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 4995 | 4995 | 201.781219 | 2021-05-10 20:05:00.274174 | 2021-05-10 20:05:00.958345 | 0 days 00:00:00.684171 | relu | relu | linear | relu | NaN | 16 | 0.003162 | 4 | 976 | PRUNED |
| 4996 | 4996 | 204.548477 | 2021-05-10 20:05:00.960340 | 2021-05-10 20:05:01.423103 | 0 days 00:00:00.462763 | relu | relu | linear | linear | linear | 32 | 0.001581 | 5 | 1004 | PRUNED |
| 4997 | 4997 | 214.939774 | 2021-05-10 20:05:01.425098 | 2021-05-10 20:05:02.158138 | 0 days 00:00:00.733040 | relu | relu | linear | linear | linear | 16 | 0.003716 | 5 | 684 | PRUNED |
| 4998 | 4998 | 194.739380 | 2021-05-10 20:05:02.160133 | 2021-05-10 20:05:02.782470 | 0 days 00:00:00.622337 | relu | linear | relu | NaN | NaN | 16 | 0.001869 | 3 | 1024 | PRUNED |
| 4999 | 4999 | 209.052216 | 2021-05-10 20:05:02.784465 | 2021-05-10 20:05:03.230273 | 0 days 00:00:00.445808 | relu | relu | linear | linear | linear | 32 | 0.002496 | 5 | 696 | PRUNED |
5000 rows × 15 columns
# Persist the trials table so results can be reloaded without re-running the study.
minmax_trials_df.to_pickle('dnn_trials/' + minmax_study_name + '_df.pkl')
# save the study for resuming later:
joblib.dump(scaled_study, "dnn_trials/" + minmax_study_name + '_study.pkl')
['dnn_trials/minmax_45_vars_half_layer_each_time_study.pkl']
# Visualize how hyperparameter choices relate to the objective value.
graph = optuna.visualization.plot_parallel_coordinate(scaled_study)
# NOTE(review): write_image needs a plotly static-image backend (kaleido/orca)
# installed — confirm, otherwise this line raises.
graph.write_image("dnn_trials/" + minmax_study_name + '_plot.jpeg')
graph.show()
# Objective value per trial over time, and per-epoch intermediate values.
optuna.visualization.plot_optimization_history(scaled_study)
optuna.visualization.plot_intermediate_values(scaled_study)
%%time
# Seed every RNG source used below (torch CPU/CUDA, numpy, stdlib random)
# so the retraining run is reproducible.
torch.manual_seed(42)
torch.cuda.manual_seed(42)
np.random.seed(42)
random.seed(42)
# *_train lists collect per-mini-batch TRAINING metrics; *_scores lists
# collect per-epoch TEST metrics.  They are module-level and mutated by
# tuned_minmax_investigation below.
mape_train = []
mape_scores = []
rmse_train = []
rmse_scores = []
mae_train = []
mae_scores = []
r2_train = []
r2_scores = []
n_epochs = 1000
# src: https://stackoverflow.com/questions/45113245/how-to-get-mini-batches-in-pytorch-in-a-clean-and-efficient-way
def tuned_minmax_investigation(trial):
    """Rebuild the DNN described by *trial* and fully retrain it on the
    MinMax-scaled data, logging per-batch training metrics and per-epoch
    test metrics into the module-level metric lists.

    Args:
        trial: Optuna trial whose params supply lr, batch_size, n_layers,
            neurons_HL1 and the per-layer activation choices.  In this
            notebook it is called with ``scaled_study.best_trial``.
            NOTE(review): calling ``suggest_*`` on a finished trial replays
            the stored values in the Optuna version used here — confirm
            this still holds on upgrade.

    Returns:
        Tuple ``(mae_scores, mape_scores, rmse_scores, r2_scores)`` — the
        module-level per-epoch TEST metric lists (mutated in place).

    Raises:
        optuna.TrialPruned: if the pruner decides the run should stop.
    """
    for key, value in trial.params.items():
        print(f" {key}: {value}")
    # set up GPU if available.
    device = "cpu"
    if torch.cuda.is_available():
        device = "cuda:0"
    # get learning_rate using optuna:
    lr = trial.suggest_float("lr", 1e-3, 1e-1, log=True)
    # get batch_size using optuna:
    batch_size = trial.suggest_categorical("batch_size", [16, 32, 64, 128])
    # suggests n_layers from 2-5
    n_layers = trial.suggest_int('n_layers', 2, 5)
    layers = []
    in_features = 45  # number of input variables in the MinMax-scaled data
    out_features = 0
    # Width of the first hidden layer; each later layer is half as wide.
    max_nrns = trial.suggest_int("neurons_HL1", 2, 1024, step=2)
    for i in range(n_layers):
        out_features = int(max_nrns)
        layers.append(torch.nn.Linear(in_features, out_features))
        activation = trial.suggest_categorical(f"HL{i}_ac_fn", ["relu", "linear"])
        if activation == "relu":
            layers.append(torch.nn.ReLU())
        # a 'linear' activation function is the identity, so no module is added.
        in_features = out_features
        # to prevent the last layer being Linear(0,1)
        if max_nrns > 2:
            max_nrns = max_nrns / 2
    # Output head: single regression target.
    layers.append(torch.nn.Linear(out_features, 1))
    dnn_model = torch.nn.Sequential(*layers).to(device)
    # now we save the (untrained) model architecture for this trial:
    with open('dnn_trials/' + minmax_study_name + f"_trial{trial.number}.pickle", 'wb') as fout:
        pickle.dump(dnn_model, fout)
    print(dnn_model)
    # use MAE as loss function (called L1Loss).
    loss_fn = nn.L1Loss()
    optimizer = optim.Adam(dnn_model.parameters(), lr=lr)
    for epoch in range(n_epochs):
        # ---- training ----
        dnn_model.train()
        # Random permutation of sample indices gives shuffled mini-batches.
        permutation = torch.randperm(MM_X_train.size()[0])
        for i in range(0, MM_X_train.size()[0], batch_size):
            indices = permutation[i:i + batch_size]
            X_train_batch, Y_train_batch = MM_X_train[indices], MM_Y_train[indices]
            train_prediction = dnn_model(X_train_batch.to(device))
            train_loss = loss_fn(train_prediction, Y_train_batch.to(device))
            # Metric helpers run on CPU copies of targets and predictions.
            train_mape = MAPE_pytorch(Y_train_batch.to('cpu'), train_prediction.to('cpu'))
            train_rmse = RMSE_pytorch(Y_train_batch.to('cpu'), train_prediction.to('cpu'))
            train_r2 = R2_pytorch(Y_train_batch.to('cpu'), train_prediction.to('cpu'))
            optimizer.zero_grad()
            # backpropagation
            train_loss.backward()
            optimizer.step()
            mae_train.append(train_loss.item())
            mape_train.append(train_mape.item())
            rmse_train.append(train_rmse.item())
            r2_train.append(train_r2.item())
        # ---- evaluation ----
        dnn_model.eval()
        # FIX: run the test pass under no_grad so no autograd graph is
        # built for it — identical numbers, far less memory per epoch.
        with torch.no_grad():
            test_prediction = dnn_model(MM_X_test.to(device))
            test_loss = loss_fn(test_prediction, MM_Y_test.to(device))
            test_mape = MAPE_pytorch(MM_Y_test.to('cpu'), test_prediction.to('cpu'))
            test_rmse = RMSE_pytorch(MM_Y_test.to('cpu'), test_prediction.to('cpu'))
            test_r2 = R2_pytorch(MM_Y_test.to('cpu'), test_prediction.to('cpu'))
        mae_scores.append(test_loss.item())
        mape_scores.append(test_mape.item())
        rmse_scores.append(test_rmse.item())
        r2_scores.append(test_r2.item())
        print(f"Epoch{epoch+1}\ttrain_loss={train_loss};\ttest_loss={test_loss}")
        # FIX: report a plain float — Optuna's Trial.report expects a float,
        # not a 0-dim tensor.  Prune the trial if the pruner says so.
        trial.report(test_loss.item(), step=epoch)
        if trial.should_prune():
            raise optuna.TrialPruned()
    # we return the thing we are trying to maximize or minimize (the MAE - our loss fn)
    return mae_scores, mape_scores, rmse_scores, r2_scores
# Retrain with the best hyperparameters and collect the per-epoch test metrics.
mae_scores, mape_scores, rmse_scores, r2_scores = tuned_minmax_investigation(scaled_study.best_trial)

def _summarize(label, values, scale=1):
    """Format mean +/- std of *values*, optionally scaled (R2 is shown as %)."""
    return f"Overall {label}: {np.mean(values)*scale} +/- {np.std(values)*scale}"

print("\n\n------------------------------------------- TRAINING SCORES -------------------------------------------")
print(_summarize("MAE", mae_train))
print(_summarize("RMSE", rmse_train))
print(_summarize("MAPE", mape_train))
print(_summarize("R2", r2_train, 100))
print("\n\n------------------------------------------- TESTING SCORES -------------------------------------------")
print(_summarize("MAE", mae_scores))
print(_summarize("RMSE", rmse_scores))
print(_summarize("MAPE", mape_scores))
print(_summarize("R2", r2_scores, 100) + "\n\n")
lr: 0.0012782353276565707
batch_size: 16
n_layers: 5
neurons_HL1: 638
HL0_ac_fn: relu
HL1_ac_fn: relu
HL2_ac_fn: linear
HL3_ac_fn: linear
HL4_ac_fn: linear
Sequential(
(0): Linear(in_features=45, out_features=638, bias=True)
(1): ReLU()
(2): Linear(in_features=638, out_features=319, bias=True)
(3): ReLU()
(4): Linear(in_features=319, out_features=159, bias=True)
(5): Linear(in_features=159, out_features=79, bias=True)
(6): Linear(in_features=79, out_features=39, bias=True)
(7): Linear(in_features=39, out_features=1, bias=True)
)
Epoch1 train_loss=75.70457458496094; test_loss=247.663818359375
Epoch2 train_loss=263.2696533203125; test_loss=204.8736572265625
Epoch3 train_loss=412.3106689453125; test_loss=183.12872314453125
Epoch4 train_loss=161.14906311035156; test_loss=183.70025634765625
Epoch5 train_loss=130.32325744628906; test_loss=199.20741271972656
Epoch6 train_loss=178.5799560546875; test_loss=178.86874389648438
Epoch7 train_loss=139.3069305419922; test_loss=200.29208374023438
Epoch8 train_loss=185.7705078125; test_loss=174.54083251953125
Epoch9 train_loss=182.2740936279297; test_loss=183.92578125
Epoch10 train_loss=107.91116333007812; test_loss=186.00442504882812
Epoch11 train_loss=141.6398468017578; test_loss=176.0626678466797
Epoch12 train_loss=493.0028991699219; test_loss=209.4246826171875
Epoch13 train_loss=61.501365661621094; test_loss=171.79696655273438
Epoch14 train_loss=128.53785705566406; test_loss=171.048095703125
Epoch15 train_loss=120.34679412841797; test_loss=197.8784942626953
Epoch16 train_loss=141.60499572753906; test_loss=170.5978546142578
Epoch17 train_loss=191.920166015625; test_loss=182.28372192382812
Epoch18 train_loss=92.99557495117188; test_loss=172.73919677734375
Epoch19 train_loss=46.64699935913086; test_loss=167.0225372314453
Epoch20 train_loss=247.24143981933594; test_loss=175.44505310058594
Epoch21 train_loss=251.38092041015625; test_loss=185.96409606933594
Epoch22 train_loss=101.30036163330078; test_loss=199.7103729248047
Epoch23 train_loss=116.98512268066406; test_loss=188.19586181640625
Epoch24 train_loss=474.5577392578125; test_loss=167.48912048339844
Epoch25 train_loss=73.25444030761719; test_loss=179.53099060058594
Epoch26 train_loss=477.9288330078125; test_loss=192.0853729248047
Epoch27 train_loss=324.501708984375; test_loss=173.1494903564453
Epoch28 train_loss=93.8862075805664; test_loss=168.48638916015625
Epoch29 train_loss=61.75487518310547; test_loss=165.45021057128906
Epoch30 train_loss=208.5587158203125; test_loss=165.11593627929688
Epoch31 train_loss=63.808380126953125; test_loss=199.2873077392578
Epoch32 train_loss=59.79205322265625; test_loss=176.0612335205078
Epoch33 train_loss=172.16708374023438; test_loss=166.9352569580078
Epoch34 train_loss=127.72396850585938; test_loss=170.42645263671875
Epoch35 train_loss=59.4090576171875; test_loss=170.10618591308594
Epoch36 train_loss=194.65060424804688; test_loss=169.0359649658203
Epoch37 train_loss=32.876075744628906; test_loss=167.61273193359375
Epoch38 train_loss=43.07868957519531; test_loss=166.66029357910156
Epoch39 train_loss=107.16871643066406; test_loss=161.6720428466797
Epoch40 train_loss=77.75640869140625; test_loss=167.33975219726562
Epoch41 train_loss=16.286081314086914; test_loss=171.4678955078125
Epoch42 train_loss=154.7318572998047; test_loss=169.21226501464844
Epoch43 train_loss=308.40191650390625; test_loss=166.94448852539062
Epoch44 train_loss=100.50343322753906; test_loss=167.73870849609375
Epoch45 train_loss=252.99856567382812; test_loss=163.90333557128906
Epoch46 train_loss=35.16017150878906; test_loss=162.5879364013672
Epoch47 train_loss=412.146240234375; test_loss=173.87664794921875
Epoch48 train_loss=66.17057800292969; test_loss=161.96055603027344
Epoch49 train_loss=109.94047546386719; test_loss=162.32115173339844
Epoch50 train_loss=422.46075439453125; test_loss=163.5623016357422
Epoch51 train_loss=285.5521240234375; test_loss=163.33900451660156
Epoch52 train_loss=204.23863220214844; test_loss=173.2054443359375
Epoch53 train_loss=196.2372283935547; test_loss=163.16317749023438
Epoch54 train_loss=180.62496948242188; test_loss=171.49237060546875
Epoch55 train_loss=62.630577087402344; test_loss=165.19134521484375
Epoch56 train_loss=151.65005493164062; test_loss=173.43475341796875
Epoch57 train_loss=13.472633361816406; test_loss=173.9818572998047
Epoch58 train_loss=214.47476196289062; test_loss=163.70518493652344
Epoch59 train_loss=69.97589111328125; test_loss=183.9188232421875
Epoch60 train_loss=62.27467346191406; test_loss=161.46755981445312
Epoch61 train_loss=39.07469177246094; test_loss=169.4269256591797
Epoch62 train_loss=100.6807861328125; test_loss=163.65167236328125
Epoch63 train_loss=308.7649841308594; test_loss=166.84765625
Epoch64 train_loss=37.777191162109375; test_loss=163.510498046875
Epoch65 train_loss=142.13131713867188; test_loss=182.96409606933594
Epoch66 train_loss=33.413108825683594; test_loss=169.60523986816406
Epoch67 train_loss=94.76138305664062; test_loss=178.13815307617188
Epoch68 train_loss=87.76373291015625; test_loss=175.0691375732422
Epoch69 train_loss=43.219337463378906; test_loss=168.379638671875
Epoch70 train_loss=321.71673583984375; test_loss=161.6463165283203
Epoch71 train_loss=58.909156799316406; test_loss=163.8381805419922
Epoch72 train_loss=67.00660705566406; test_loss=159.8485565185547
Epoch73 train_loss=21.21393585205078; test_loss=160.646484375
Epoch74 train_loss=54.837181091308594; test_loss=167.27516174316406
Epoch75 train_loss=75.11746978759766; test_loss=161.6029815673828
Epoch76 train_loss=219.9094696044922; test_loss=164.9193878173828
Epoch77 train_loss=137.75697326660156; test_loss=173.74383544921875
Epoch78 train_loss=243.76193237304688; test_loss=174.65634155273438
Epoch79 train_loss=180.51895141601562; test_loss=162.29025268554688
Epoch80 train_loss=346.0010986328125; test_loss=162.51480102539062
Epoch81 train_loss=117.18855285644531; test_loss=171.63485717773438
Epoch82 train_loss=85.90312194824219; test_loss=163.74794006347656
Epoch83 train_loss=129.93804931640625; test_loss=161.0546112060547
Epoch84 train_loss=79.7008056640625; test_loss=166.54063415527344
Epoch85 train_loss=206.5786590576172; test_loss=163.71755981445312
Epoch86 train_loss=105.83788299560547; test_loss=206.2197265625
Epoch87 train_loss=179.26312255859375; test_loss=175.228759765625
Epoch88 train_loss=127.27751922607422; test_loss=184.47869873046875
Epoch89 train_loss=65.87262725830078; test_loss=173.23118591308594
Epoch90 train_loss=373.99652099609375; test_loss=161.36111450195312
Epoch91 train_loss=181.9956512451172; test_loss=167.85635375976562
Epoch92 train_loss=55.999717712402344; test_loss=160.41610717773438
Epoch93 train_loss=91.72183227539062; test_loss=166.00636291503906
Epoch94 train_loss=155.2834930419922; test_loss=160.80068969726562
Epoch95 train_loss=78.55410766601562; test_loss=159.5791473388672
Epoch96 train_loss=175.85122680664062; test_loss=167.3440704345703
Epoch97 train_loss=193.30828857421875; test_loss=160.83299255371094
Epoch98 train_loss=133.56549072265625; test_loss=158.41778564453125
Epoch99 train_loss=209.24851989746094; test_loss=164.0442657470703
Epoch100 train_loss=34.892913818359375; test_loss=158.60491943359375
Epoch101 train_loss=91.31553649902344; test_loss=160.5886688232422
Epoch102 train_loss=170.6646270751953; test_loss=166.8235626220703
Epoch103 train_loss=414.6025390625; test_loss=160.89622497558594
Epoch104 train_loss=86.47952270507812; test_loss=165.43048095703125
Epoch105 train_loss=217.62161254882812; test_loss=168.25347900390625
Epoch106 train_loss=71.53933715820312; test_loss=158.64060974121094
Epoch107 train_loss=121.11442565917969; test_loss=161.04551696777344
Epoch108 train_loss=132.90240478515625; test_loss=171.61917114257812
Epoch109 train_loss=243.31399536132812; test_loss=159.4439239501953
Epoch110 train_loss=27.016265869140625; test_loss=164.2459716796875
Epoch111 train_loss=94.17379760742188; test_loss=172.47935485839844
Epoch112 train_loss=259.35516357421875; test_loss=185.4591827392578
Epoch113 train_loss=26.381698608398438; test_loss=162.95433044433594
Epoch114 train_loss=78.73567962646484; test_loss=161.31228637695312
Epoch115 train_loss=117.56346893310547; test_loss=163.1212615966797
Epoch116 train_loss=196.76968383789062; test_loss=160.25250244140625
Epoch117 train_loss=106.63201904296875; test_loss=161.4456024169922
Epoch118 train_loss=139.9998016357422; test_loss=169.15963745117188
Epoch119 train_loss=81.5022964477539; test_loss=166.2342071533203
Epoch120 train_loss=163.36346435546875; test_loss=168.53334045410156
Epoch121 train_loss=48.09291076660156; test_loss=159.15435791015625
Epoch122 train_loss=78.92645263671875; test_loss=163.6394500732422
Epoch123 train_loss=219.43215942382812; test_loss=159.0744171142578
Epoch124 train_loss=145.7825469970703; test_loss=160.0835723876953
Epoch125 train_loss=111.99613952636719; test_loss=200.929931640625
Epoch126 train_loss=211.24179077148438; test_loss=160.77517700195312
Epoch127 train_loss=47.27409362792969; test_loss=158.24717712402344
Epoch128 train_loss=56.98500061035156; test_loss=177.49671936035156
Epoch129 train_loss=129.21405029296875; test_loss=183.6824951171875
Epoch130 train_loss=428.10675048828125; test_loss=158.4635009765625
Epoch131 train_loss=68.42982482910156; test_loss=164.56056213378906
Epoch132 train_loss=229.99429321289062; test_loss=165.32391357421875
Epoch133 train_loss=136.66253662109375; test_loss=161.99159240722656
Epoch134 train_loss=130.73866271972656; test_loss=167.1224822998047
Epoch135 train_loss=60.450252532958984; test_loss=157.30865478515625
Epoch136 train_loss=226.19070434570312; test_loss=160.9291229248047
Epoch137 train_loss=330.2198181152344; test_loss=160.04776000976562
Epoch138 train_loss=66.67391204833984; test_loss=166.03465270996094
Epoch139 train_loss=307.20294189453125; test_loss=158.8902587890625
Epoch140 train_loss=125.32147979736328; test_loss=159.73838806152344
Epoch141 train_loss=95.3260498046875; test_loss=157.94813537597656
Epoch142 train_loss=36.00907897949219; test_loss=157.5999298095703
Epoch143 train_loss=51.80188751220703; test_loss=158.24256896972656
Epoch144 train_loss=86.08739471435547; test_loss=177.25106811523438
Epoch145 train_loss=340.0122375488281; test_loss=168.49107360839844
Epoch146 train_loss=495.0157470703125; test_loss=158.677978515625
Epoch147 train_loss=29.09326934814453; test_loss=156.76205444335938
Epoch148 train_loss=141.845703125; test_loss=159.5399932861328
Epoch149 train_loss=202.98707580566406; test_loss=158.2216033935547
Epoch150 train_loss=57.26333236694336; test_loss=179.37396240234375
Epoch151 train_loss=104.74656677246094; test_loss=176.81922912597656
Epoch152 train_loss=95.67881774902344; test_loss=158.5897216796875
Epoch153 train_loss=111.73926544189453; test_loss=159.8252716064453
Epoch154 train_loss=86.77349853515625; test_loss=156.9349365234375
Epoch155 train_loss=107.15826416015625; test_loss=187.1204376220703
Epoch156 train_loss=237.16183471679688; test_loss=157.61630249023438
Epoch157 train_loss=137.8391876220703; test_loss=174.32142639160156
Epoch158 train_loss=78.1668701171875; test_loss=157.255859375
Epoch159 train_loss=190.82127380371094; test_loss=175.22308349609375
Epoch160 train_loss=228.55935668945312; test_loss=164.3087615966797
Epoch161 train_loss=255.49107360839844; test_loss=158.8656768798828
Epoch162 train_loss=155.91030883789062; test_loss=160.1742706298828
Epoch163 train_loss=332.1170959472656; test_loss=156.28817749023438
Epoch164 train_loss=140.91314697265625; test_loss=159.79928588867188
Epoch165 train_loss=44.376930236816406; test_loss=158.37875366210938
Epoch166 train_loss=68.63391876220703; test_loss=159.97389221191406
Epoch167 train_loss=200.52093505859375; test_loss=161.87210083007812
Epoch168 train_loss=77.5337905883789; test_loss=159.9298095703125
Epoch169 train_loss=200.48634338378906; test_loss=158.5584259033203
Epoch170 train_loss=183.89447021484375; test_loss=167.1115264892578
Epoch171 train_loss=44.36933898925781; test_loss=160.2401885986328
Epoch172 train_loss=31.1934814453125; test_loss=159.65255737304688
Epoch173 train_loss=143.06536865234375; test_loss=159.13011169433594
Epoch174 train_loss=143.26040649414062; test_loss=156.99819946289062
Epoch175 train_loss=108.685546875; test_loss=157.05955505371094
Epoch176 train_loss=84.63397979736328; test_loss=161.6768798828125
Epoch177 train_loss=415.0722961425781; test_loss=168.1226348876953
Epoch178 train_loss=124.41041564941406; test_loss=159.4571990966797
Epoch179 train_loss=582.1387939453125; test_loss=159.0816192626953
Epoch180 train_loss=91.24468994140625; test_loss=158.5513153076172
Epoch181 train_loss=148.76971435546875; test_loss=156.54678344726562
Epoch182 train_loss=39.95542907714844; test_loss=166.67056274414062
Epoch183 train_loss=11.399116516113281; test_loss=157.43576049804688
Epoch184 train_loss=69.52779388427734; test_loss=156.92230224609375
Epoch185 train_loss=80.15437316894531; test_loss=157.42117309570312
Epoch186 train_loss=98.80945587158203; test_loss=157.11026000976562
Epoch187 train_loss=159.27133178710938; test_loss=158.11386108398438
Epoch188 train_loss=45.17109680175781; test_loss=155.21853637695312
Epoch189 train_loss=204.62342834472656; test_loss=201.7503204345703
Epoch190 train_loss=293.72540283203125; test_loss=157.7662353515625
Epoch191 train_loss=180.0516815185547; test_loss=159.81361389160156
Epoch192 train_loss=209.74801635742188; test_loss=158.267333984375
Epoch193 train_loss=144.35147094726562; test_loss=157.65740966796875
Epoch194 train_loss=146.917724609375; test_loss=157.07749938964844
Epoch195 train_loss=73.45849609375; test_loss=160.0405731201172
Epoch196 train_loss=165.48777770996094; test_loss=162.92916870117188
Epoch197 train_loss=93.2684326171875; test_loss=154.62257385253906
Epoch198 train_loss=115.22007751464844; test_loss=156.90965270996094
Epoch199 train_loss=78.01776123046875; test_loss=156.65185546875
Epoch200 train_loss=55.55316162109375; test_loss=157.97744750976562
Epoch201 train_loss=56.14067459106445; test_loss=155.46365356445312
Epoch202 train_loss=47.140419006347656; test_loss=158.71009826660156
Epoch203 train_loss=32.06898498535156; test_loss=156.3199005126953
Epoch204 train_loss=73.84916687011719; test_loss=160.65061950683594
Epoch205 train_loss=94.23664855957031; test_loss=158.33673095703125
Epoch206 train_loss=369.8416442871094; test_loss=159.7962646484375
Epoch207 train_loss=125.06693267822266; test_loss=158.16891479492188
Epoch208 train_loss=138.28236389160156; test_loss=157.10580444335938
Epoch209 train_loss=97.50376892089844; test_loss=157.31008911132812
Epoch210 train_loss=372.6959533691406; test_loss=154.489501953125
Epoch211 train_loss=147.47833251953125; test_loss=162.44931030273438
Epoch212 train_loss=128.71604919433594; test_loss=159.63082885742188
Epoch213 train_loss=340.22808837890625; test_loss=158.31031799316406
Epoch214 train_loss=19.308837890625; test_loss=154.69557189941406
Epoch215 train_loss=105.38534545898438; test_loss=155.5164337158203
Epoch216 train_loss=197.20358276367188; test_loss=158.1050262451172
Epoch217 train_loss=168.18643188476562; test_loss=156.25965881347656
Epoch218 train_loss=246.0167236328125; test_loss=159.25143432617188
Epoch219 train_loss=74.52694702148438; test_loss=154.72647094726562
Epoch220 train_loss=279.9048156738281; test_loss=160.83021545410156
Epoch221 train_loss=51.66949462890625; test_loss=171.77578735351562
Epoch222 train_loss=457.3315734863281; test_loss=156.6299285888672
Epoch223 train_loss=96.28323364257812; test_loss=157.06036376953125
Epoch224 train_loss=127.33724975585938; test_loss=156.87307739257812
Epoch225 train_loss=159.08360290527344; test_loss=156.53659057617188
Epoch226 train_loss=76.03099060058594; test_loss=157.40274047851562
Epoch227 train_loss=149.9807891845703; test_loss=157.10964965820312
Epoch228 train_loss=195.50921630859375; test_loss=162.04391479492188
Epoch229 train_loss=83.99767303466797; test_loss=157.4061737060547
Epoch230 train_loss=60.533897399902344; test_loss=161.07769775390625
Epoch231 train_loss=31.786540985107422; test_loss=158.8103790283203
Epoch232 train_loss=85.50621032714844; test_loss=156.3523406982422
Epoch233 train_loss=67.11495971679688; test_loss=158.11500549316406
Epoch234 train_loss=437.0824279785156; test_loss=156.9410858154297
Epoch235 train_loss=57.94063186645508; test_loss=159.74327087402344
Epoch236 train_loss=118.46746826171875; test_loss=159.04978942871094
Epoch237 train_loss=35.959964752197266; test_loss=167.20132446289062
Epoch238 train_loss=176.68081665039062; test_loss=160.11273193359375
Epoch239 train_loss=69.99273681640625; test_loss=158.20291137695312
Epoch240 train_loss=119.05180358886719; test_loss=159.66148376464844
Epoch241 train_loss=43.50865173339844; test_loss=160.3096466064453
Epoch242 train_loss=192.15542602539062; test_loss=161.30442810058594
Epoch243 train_loss=498.18603515625; test_loss=157.32757568359375
Epoch244 train_loss=56.308197021484375; test_loss=161.91246032714844
Epoch245 train_loss=328.0167236328125; test_loss=157.42947387695312
Epoch246 train_loss=116.65640258789062; test_loss=160.7299346923828
Epoch247 train_loss=61.168121337890625; test_loss=159.75453186035156
Epoch248 train_loss=30.899765014648438; test_loss=159.03822326660156
Epoch249 train_loss=50.12931823730469; test_loss=158.85301208496094
Epoch250 train_loss=283.42193603515625; test_loss=163.59718322753906
Epoch251 train_loss=220.32241821289062; test_loss=158.25567626953125
Epoch252 train_loss=119.0975341796875; test_loss=159.13636779785156
Epoch253 train_loss=120.32041931152344; test_loss=156.84803771972656
Epoch254 train_loss=113.75167846679688; test_loss=162.81842041015625
Epoch255 train_loss=86.86862182617188; test_loss=157.25088500976562
Epoch256 train_loss=83.10960388183594; test_loss=155.48292541503906
Epoch257 train_loss=110.39379119873047; test_loss=165.54066467285156
Epoch258 train_loss=132.0286865234375; test_loss=157.60678100585938
Epoch259 train_loss=92.20101928710938; test_loss=166.46824645996094
Epoch260 train_loss=115.95413208007812; test_loss=161.04742431640625
Epoch261 train_loss=53.47929382324219; test_loss=164.64837646484375
Epoch262 train_loss=22.285659790039062; test_loss=155.6371307373047
Epoch263 train_loss=64.3576889038086; test_loss=159.4901580810547
Epoch264 train_loss=90.72496032714844; test_loss=156.44615173339844
Epoch265 train_loss=122.88046264648438; test_loss=157.91575622558594
Epoch266 train_loss=212.3505096435547; test_loss=160.0001220703125
Epoch267 train_loss=117.87109375; test_loss=157.0355987548828
Epoch268 train_loss=77.91726684570312; test_loss=158.7170867919922
Epoch269 train_loss=114.42337036132812; test_loss=160.42300415039062
Epoch270 train_loss=456.5028076171875; test_loss=155.17025756835938
Epoch271 train_loss=62.816123962402344; test_loss=164.83157348632812
Epoch272 train_loss=156.5654754638672; test_loss=157.267822265625
Epoch273 train_loss=239.46142578125; test_loss=159.47039794921875
Epoch274 train_loss=133.52133178710938; test_loss=157.93466186523438
Epoch275 train_loss=280.1860046386719; test_loss=159.16944885253906
Epoch276 train_loss=72.33160400390625; test_loss=156.19424438476562
Epoch277 train_loss=149.44375610351562; test_loss=160.34042358398438
Epoch278 train_loss=84.24928283691406; test_loss=157.33648681640625
Epoch279 train_loss=93.84233093261719; test_loss=159.45057678222656
Epoch280 train_loss=122.87910461425781; test_loss=155.46914672851562
Epoch281 train_loss=145.3729705810547; test_loss=154.16590881347656
Epoch282 train_loss=393.31134033203125; test_loss=161.4230499267578
Epoch283 train_loss=178.1472625732422; test_loss=157.45159912109375
Epoch284 train_loss=119.67265319824219; test_loss=153.9945068359375
Epoch285 train_loss=44.62818145751953; test_loss=157.48326110839844
Epoch286 train_loss=129.54849243164062; test_loss=157.0924072265625
Epoch287 train_loss=79.77131652832031; test_loss=157.5442352294922
Epoch288 train_loss=124.92140197753906; test_loss=156.5182342529297
Epoch289 train_loss=153.64735412597656; test_loss=155.37612915039062
Epoch290 train_loss=78.94380187988281; test_loss=175.1632080078125
Epoch291 train_loss=43.312042236328125; test_loss=155.6036376953125
Epoch292 train_loss=158.66934204101562; test_loss=157.31112670898438
Epoch293 train_loss=53.76130676269531; test_loss=156.59902954101562
Epoch294 train_loss=81.11597442626953; test_loss=157.26046752929688
Epoch295 train_loss=239.68499755859375; test_loss=157.82843017578125
Epoch296 train_loss=128.1220703125; test_loss=155.45721435546875
Epoch297 train_loss=50.638580322265625; test_loss=158.94772338867188
Epoch298 train_loss=140.15365600585938; test_loss=155.27159118652344
Epoch299 train_loss=161.56954956054688; test_loss=155.13986206054688
Epoch300 train_loss=48.764923095703125; test_loss=157.24795532226562
Epoch301 train_loss=129.63002014160156; test_loss=156.74578857421875
Epoch302 train_loss=447.6294860839844; test_loss=160.76036071777344
Epoch303 train_loss=74.76884460449219; test_loss=156.81307983398438
Epoch304 train_loss=46.838287353515625; test_loss=160.62655639648438
Epoch305 train_loss=72.1614990234375; test_loss=158.66246032714844
Epoch306 train_loss=58.28050231933594; test_loss=155.49365234375
Epoch307 train_loss=80.88890075683594; test_loss=153.5075225830078
Epoch308 train_loss=58.636592864990234; test_loss=153.19064331054688
Epoch309 train_loss=161.66586303710938; test_loss=160.35816955566406
Epoch310 train_loss=188.08587646484375; test_loss=159.66847229003906
Epoch311 train_loss=73.94628143310547; test_loss=155.7113037109375
Epoch312 train_loss=161.9571533203125; test_loss=157.20358276367188
Epoch313 train_loss=88.71298217773438; test_loss=159.68685913085938
Epoch314 train_loss=62.18984603881836; test_loss=158.86947631835938
Epoch315 train_loss=42.14140319824219; test_loss=155.67642211914062
Epoch316 train_loss=24.51425552368164; test_loss=158.37100219726562
Epoch317 train_loss=77.24317932128906; test_loss=156.02452087402344
Epoch318 train_loss=43.03988265991211; test_loss=156.21470642089844
Epoch319 train_loss=35.168304443359375; test_loss=156.32655334472656
Epoch320 train_loss=96.87877655029297; test_loss=156.0442352294922
Epoch321 train_loss=36.936214447021484; test_loss=155.76486206054688
Epoch322 train_loss=176.90211486816406; test_loss=157.75059509277344
Epoch323 train_loss=185.47039794921875; test_loss=155.1932373046875
Epoch324 train_loss=31.35448455810547; test_loss=155.8848876953125
Epoch325 train_loss=57.217308044433594; test_loss=155.4472198486328
Epoch326 train_loss=77.21151733398438; test_loss=164.26324462890625
Epoch327 train_loss=65.2398681640625; test_loss=154.87387084960938
Epoch328 train_loss=224.70632934570312; test_loss=154.904052734375
Epoch329 train_loss=242.0235595703125; test_loss=157.4500732421875
Epoch330 train_loss=227.86593627929688; test_loss=165.37429809570312
Epoch331 train_loss=40.683719635009766; test_loss=155.67108154296875
Epoch332 train_loss=139.8217010498047; test_loss=156.23822021484375
Epoch333 train_loss=27.993087768554688; test_loss=156.38636779785156
Epoch334 train_loss=164.77316284179688; test_loss=155.70474243164062
Epoch335 train_loss=101.50838470458984; test_loss=158.04354858398438
Epoch336 train_loss=171.43515014648438; test_loss=153.3320770263672
Epoch337 train_loss=42.1482048034668; test_loss=155.7086181640625
Epoch338 train_loss=94.98640441894531; test_loss=156.93421936035156
Epoch339 train_loss=123.45733642578125; test_loss=159.73353576660156
Epoch340 train_loss=352.96746826171875; test_loss=155.40579223632812
Epoch341 train_loss=42.695274353027344; test_loss=155.76512145996094
Epoch342 train_loss=162.9612274169922; test_loss=156.3824005126953
Epoch343 train_loss=22.228065490722656; test_loss=154.9823455810547
Epoch344 train_loss=80.94161224365234; test_loss=161.09661865234375
Epoch345 train_loss=187.41867065429688; test_loss=154.9829864501953
Epoch346 train_loss=54.130592346191406; test_loss=157.60403442382812
Epoch347 train_loss=103.76974487304688; test_loss=155.80528259277344
Epoch348 train_loss=130.28443908691406; test_loss=156.71340942382812
Epoch349 train_loss=81.19345092773438; test_loss=155.3349609375
Epoch350 train_loss=136.9549560546875; test_loss=156.6873779296875
Epoch351 train_loss=82.77386474609375; test_loss=160.11280822753906
Epoch352 train_loss=29.57079315185547; test_loss=160.00411987304688
Epoch353 train_loss=44.11820983886719; test_loss=160.64471435546875
Epoch354 train_loss=105.68435668945312; test_loss=158.43797302246094
Epoch355 train_loss=50.431007385253906; test_loss=155.92291259765625
Epoch356 train_loss=132.92166137695312; test_loss=154.86767578125
Epoch357 train_loss=129.61972045898438; test_loss=156.5288848876953
Epoch358 train_loss=53.48627471923828; test_loss=158.38356018066406
Epoch359 train_loss=61.49053192138672; test_loss=155.1044464111328
Epoch360 train_loss=104.28958129882812; test_loss=155.7786407470703
Epoch361 train_loss=155.02268981933594; test_loss=158.19000244140625
Epoch362 train_loss=118.85694122314453; test_loss=156.2541046142578
Epoch363 train_loss=128.3726043701172; test_loss=158.77284240722656
Epoch364 train_loss=97.79180908203125; test_loss=158.85972595214844
Epoch365 train_loss=41.98169708251953; test_loss=156.72232055664062
Epoch366 train_loss=116.01158142089844; test_loss=155.53482055664062
Epoch367 train_loss=230.7568817138672; test_loss=155.68666076660156
Epoch368 train_loss=155.52294921875; test_loss=158.75375366210938
Epoch369 train_loss=96.39602661132812; test_loss=156.28067016601562
Epoch370 train_loss=64.92584228515625; test_loss=154.87185668945312
Epoch371 train_loss=342.9826965332031; test_loss=159.47328186035156
Epoch372 train_loss=97.36416625976562; test_loss=154.8780975341797
Epoch373 train_loss=41.56596374511719; test_loss=153.6339874267578
Epoch374 train_loss=90.69776916503906; test_loss=160.57225036621094
Epoch375 train_loss=32.684120178222656; test_loss=157.9943084716797
Epoch376 train_loss=44.11030578613281; test_loss=155.7186737060547
Epoch377 train_loss=211.22174072265625; test_loss=154.57838439941406
Epoch378 train_loss=59.88031005859375; test_loss=158.34884643554688
Epoch379 train_loss=36.496891021728516; test_loss=156.04745483398438
Epoch380 train_loss=63.11976623535156; test_loss=158.3339080810547
Epoch381 train_loss=36.85414123535156; test_loss=155.5919189453125
Epoch382 train_loss=304.93994140625; test_loss=157.65733337402344
Epoch383 train_loss=78.56336975097656; test_loss=156.2510223388672
Epoch384 train_loss=40.44752883911133; test_loss=159.55271911621094
Epoch385 train_loss=107.03292846679688; test_loss=156.147216796875
Epoch386 train_loss=85.821044921875; test_loss=157.48080444335938
Epoch387 train_loss=80.6686019897461; test_loss=155.59555053710938
Epoch388 train_loss=184.9477081298828; test_loss=160.1626434326172
Epoch389 train_loss=193.28062438964844; test_loss=155.28330993652344
Epoch390 train_loss=116.35884094238281; test_loss=157.49203491210938
Epoch391 train_loss=254.11239624023438; test_loss=160.13528442382812
Epoch392 train_loss=74.13525390625; test_loss=154.42091369628906
Epoch393 train_loss=45.94523620605469; test_loss=154.48648071289062
Epoch394 train_loss=258.57843017578125; test_loss=156.38853454589844
Epoch395 train_loss=181.9000701904297; test_loss=154.1499481201172
Epoch396 train_loss=55.24093246459961; test_loss=155.5378875732422
Epoch397 train_loss=69.66325378417969; test_loss=156.69854736328125
Epoch398 train_loss=58.155094146728516; test_loss=162.90565490722656
Epoch399 train_loss=159.16864013671875; test_loss=155.07412719726562
Epoch400 train_loss=22.14362335205078; test_loss=159.4011688232422
Epoch401 train_loss=82.4576187133789; test_loss=159.2090301513672
Epoch402 train_loss=193.02993774414062; test_loss=153.9360809326172
Epoch403 train_loss=45.685508728027344; test_loss=158.89529418945312
Epoch404 train_loss=410.91278076171875; test_loss=162.66189575195312
Epoch405 train_loss=79.22164154052734; test_loss=155.4266815185547
Epoch406 train_loss=280.2689208984375; test_loss=156.3003692626953
Epoch407 train_loss=42.39320755004883; test_loss=155.847412109375
Epoch408 train_loss=74.82241821289062; test_loss=159.40838623046875
Epoch409 train_loss=75.973388671875; test_loss=161.21873474121094
Epoch410 train_loss=56.57196807861328; test_loss=160.07064819335938
Epoch411 train_loss=101.46333312988281; test_loss=157.3552703857422
Epoch412 train_loss=116.28924560546875; test_loss=159.4746856689453
Epoch413 train_loss=100.77550506591797; test_loss=154.9732666015625
Epoch414 train_loss=87.00654602050781; test_loss=155.36756896972656
Epoch415 train_loss=43.44228744506836; test_loss=156.5855255126953
Epoch416 train_loss=127.68897247314453; test_loss=156.987548828125
Epoch417 train_loss=70.83488464355469; test_loss=157.8230438232422
Epoch418 train_loss=349.48809814453125; test_loss=160.46629333496094
Epoch419 train_loss=148.16244506835938; test_loss=154.97279357910156
Epoch420 train_loss=80.6659927368164; test_loss=154.1068572998047
Epoch421 train_loss=160.11053466796875; test_loss=153.82638549804688
Epoch422 train_loss=36.61426544189453; test_loss=155.4512176513672
Epoch423 train_loss=87.58662414550781; test_loss=157.88096618652344
Epoch424 train_loss=83.11959838867188; test_loss=156.2938690185547
Epoch425 train_loss=194.52455139160156; test_loss=157.1503143310547
Epoch426 train_loss=220.98040771484375; test_loss=156.4385223388672
Epoch427 train_loss=73.94499206542969; test_loss=156.34239196777344
Epoch428 train_loss=147.4461669921875; test_loss=172.96023559570312
Epoch429 train_loss=81.93447875976562; test_loss=156.0369415283203
Epoch430 train_loss=69.92483520507812; test_loss=160.43484497070312
Epoch431 train_loss=49.139564514160156; test_loss=156.55447387695312
Epoch432 train_loss=42.044551849365234; test_loss=159.56997680664062
Epoch433 train_loss=89.66943359375; test_loss=156.3365936279297
Epoch434 train_loss=41.81988525390625; test_loss=154.7353515625
Epoch435 train_loss=89.6656494140625; test_loss=157.0782470703125
Epoch436 train_loss=166.99485778808594; test_loss=158.75094604492188
Epoch437 train_loss=355.43060302734375; test_loss=161.21710205078125
Epoch438 train_loss=26.0251407623291; test_loss=154.78546142578125
Epoch439 train_loss=41.70506286621094; test_loss=157.14273071289062
Epoch440 train_loss=118.00416564941406; test_loss=157.97726440429688
Epoch441 train_loss=74.61366271972656; test_loss=155.7865753173828
Epoch442 train_loss=140.34292602539062; test_loss=157.87002563476562
Epoch443 train_loss=54.37384033203125; test_loss=159.4100799560547
Epoch444 train_loss=202.99087524414062; test_loss=155.4152069091797
Epoch445 train_loss=33.79759979248047; test_loss=161.51296997070312
Epoch446 train_loss=173.99461364746094; test_loss=161.4767608642578
Epoch447 train_loss=464.4120178222656; test_loss=156.98468017578125
Epoch448 train_loss=27.79755401611328; test_loss=154.8408966064453
Epoch449 train_loss=82.067626953125; test_loss=156.9676055908203
Epoch450 train_loss=81.83589172363281; test_loss=158.3488006591797
Epoch451 train_loss=936.1513061523438; test_loss=155.13394165039062
Epoch452 train_loss=233.829833984375; test_loss=156.0847930908203
Epoch453 train_loss=164.588134765625; test_loss=157.59521484375
Epoch454 train_loss=94.87618255615234; test_loss=156.64366149902344
Epoch455 train_loss=57.32227325439453; test_loss=162.7572021484375
Epoch456 train_loss=117.60162353515625; test_loss=156.32765197753906
Epoch457 train_loss=104.44548034667969; test_loss=156.89431762695312
Epoch458 train_loss=109.99272155761719; test_loss=156.019287109375
Epoch459 train_loss=110.68758392333984; test_loss=156.09930419921875
Epoch460 train_loss=160.05166625976562; test_loss=161.89918518066406
Epoch461 train_loss=93.52716827392578; test_loss=160.64195251464844
Epoch462 train_loss=91.53858947753906; test_loss=158.73544311523438
Epoch463 train_loss=120.45026397705078; test_loss=156.56300354003906
Epoch464 train_loss=96.86151123046875; test_loss=156.47708129882812
Epoch465 train_loss=72.43281555175781; test_loss=159.40798950195312
Epoch466 train_loss=86.53955078125; test_loss=158.44012451171875
Epoch467 train_loss=130.0906219482422; test_loss=156.2655487060547
Epoch468 train_loss=41.6428108215332; test_loss=156.33375549316406
Epoch469 train_loss=251.01797485351562; test_loss=156.2308349609375
Epoch470 train_loss=463.22772216796875; test_loss=157.55331420898438
Epoch471 train_loss=72.02928161621094; test_loss=160.66310119628906
Epoch472 train_loss=180.4463348388672; test_loss=162.15406799316406
Epoch473 train_loss=84.95249938964844; test_loss=157.0546417236328
Epoch474 train_loss=253.77676391601562; test_loss=156.97296142578125
Epoch475 train_loss=45.75367736816406; test_loss=158.6702117919922
Epoch476 train_loss=114.96803283691406; test_loss=157.60105895996094
Epoch477 train_loss=213.59625244140625; test_loss=178.17428588867188
Epoch478 train_loss=96.45130920410156; test_loss=158.0075225830078
Epoch479 train_loss=43.420204162597656; test_loss=156.49609375
Epoch480 train_loss=88.82180786132812; test_loss=157.4554443359375
Epoch481 train_loss=304.3070068359375; test_loss=159.0620574951172
Epoch482 train_loss=45.591468811035156; test_loss=156.83132934570312
Epoch483 train_loss=31.903953552246094; test_loss=158.37294006347656
Epoch484 train_loss=261.2679443359375; test_loss=156.23948669433594
Epoch485 train_loss=31.44078826904297; test_loss=157.00958251953125
Epoch486 train_loss=129.649169921875; test_loss=159.41006469726562
Epoch487 train_loss=57.911651611328125; test_loss=157.1932830810547
Epoch488 train_loss=137.20721435546875; test_loss=165.02960205078125
Epoch489 train_loss=36.76164245605469; test_loss=156.44097900390625
Epoch490 train_loss=308.53948974609375; test_loss=164.40650939941406
Epoch491 train_loss=87.79166412353516; test_loss=157.24110412597656
Epoch492 train_loss=36.23698425292969; test_loss=158.57522583007812
Epoch493 train_loss=97.57183837890625; test_loss=156.91578674316406
Epoch494 train_loss=80.10247039794922; test_loss=157.77000427246094
Epoch495 train_loss=114.35657501220703; test_loss=156.47433471679688
Epoch496 train_loss=97.1925048828125; test_loss=158.68141174316406
Epoch497 train_loss=160.2072296142578; test_loss=157.89334106445312
Epoch498 train_loss=133.01840209960938; test_loss=155.07327270507812
Epoch499 train_loss=149.51220703125; test_loss=158.9419403076172
Epoch500 train_loss=75.36346435546875; test_loss=161.90469360351562
Epoch501 train_loss=72.79113006591797; test_loss=156.72219848632812
Epoch502 train_loss=88.86517333984375; test_loss=157.16229248046875
Epoch503 train_loss=69.73001861572266; test_loss=161.7447509765625
Epoch504 train_loss=105.80451965332031; test_loss=162.19207763671875
Epoch505 train_loss=53.78046417236328; test_loss=159.50668334960938
Epoch506 train_loss=45.00457763671875; test_loss=158.1820526123047
Epoch507 train_loss=319.07940673828125; test_loss=158.9049835205078
Epoch508 train_loss=279.985107421875; test_loss=158.33993530273438
Epoch509 train_loss=38.1263427734375; test_loss=158.09671020507812
Epoch510 train_loss=39.00874710083008; test_loss=159.54611206054688
Epoch511 train_loss=229.0691680908203; test_loss=155.87298583984375
Epoch512 train_loss=47.112945556640625; test_loss=155.92657470703125
Epoch513 train_loss=318.4751281738281; test_loss=161.0912628173828
Epoch514 train_loss=414.63800048828125; test_loss=157.82977294921875
Epoch515 train_loss=99.71519470214844; test_loss=161.4786834716797
Epoch516 train_loss=125.42791748046875; test_loss=158.2798309326172
Epoch517 train_loss=274.2529296875; test_loss=158.29464721679688
Epoch518 train_loss=97.10447692871094; test_loss=168.37608337402344
Epoch519 train_loss=28.003089904785156; test_loss=159.34071350097656
Epoch520 train_loss=127.56344604492188; test_loss=156.94039916992188
Epoch521 train_loss=109.38552856445312; test_loss=155.16152954101562
Epoch522 train_loss=114.57647705078125; test_loss=157.23916625976562
Epoch523 train_loss=400.508544921875; test_loss=157.80149841308594
Epoch524 train_loss=311.4013671875; test_loss=160.10194396972656
Epoch525 train_loss=195.37026977539062; test_loss=158.43698120117188
Epoch526 train_loss=19.304004669189453; test_loss=155.9678497314453
Epoch527 train_loss=172.98341369628906; test_loss=156.81224060058594
Epoch528 train_loss=89.56271362304688; test_loss=158.01809692382812
Epoch529 train_loss=70.35820007324219; test_loss=157.10134887695312
Epoch530 train_loss=299.66064453125; test_loss=159.86984252929688
Epoch531 train_loss=59.36846923828125; test_loss=157.657958984375
Epoch532 train_loss=57.355770111083984; test_loss=158.13356018066406
Epoch533 train_loss=157.83181762695312; test_loss=156.85919189453125
Epoch534 train_loss=51.96007537841797; test_loss=156.64317321777344
Epoch535 train_loss=37.03407287597656; test_loss=154.81048583984375
Epoch536 train_loss=220.31756591796875; test_loss=156.00975036621094
Epoch537 train_loss=56.81151580810547; test_loss=157.26004028320312
Epoch538 train_loss=187.94796752929688; test_loss=157.68392944335938
Epoch539 train_loss=107.29461669921875; test_loss=156.93258666992188
Epoch540 train_loss=105.13600158691406; test_loss=157.75680541992188
Epoch541 train_loss=174.5078887939453; test_loss=157.0257110595703
Epoch542 train_loss=27.468990325927734; test_loss=160.25059509277344
Epoch543 train_loss=57.82746887207031; test_loss=159.22975158691406
Epoch544 train_loss=67.2974624633789; test_loss=159.4420623779297
Epoch545 train_loss=127.60808563232422; test_loss=158.13136291503906
Epoch546 train_loss=136.32687377929688; test_loss=157.05845642089844
Epoch547 train_loss=97.98194885253906; test_loss=156.8125457763672
Epoch548 train_loss=57.830055236816406; test_loss=158.80621337890625
Epoch549 train_loss=182.01438903808594; test_loss=157.57212829589844
Epoch550 train_loss=326.4491882324219; test_loss=161.1302947998047
Epoch551 train_loss=121.52574157714844; test_loss=157.8157958984375
Epoch552 train_loss=121.57344055175781; test_loss=161.35401916503906
Epoch553 train_loss=158.73667907714844; test_loss=159.8231658935547
Epoch554 train_loss=52.270416259765625; test_loss=161.8318328857422
Epoch555 train_loss=36.81879425048828; test_loss=158.75436401367188
Epoch556 train_loss=68.24382019042969; test_loss=159.257080078125
Epoch557 train_loss=152.8881378173828; test_loss=157.38888549804688
Epoch558 train_loss=82.26020050048828; test_loss=159.3812713623047
Epoch559 train_loss=91.01786804199219; test_loss=159.67909240722656
Epoch560 train_loss=200.1039276123047; test_loss=155.87908935546875
Epoch561 train_loss=119.37222290039062; test_loss=158.80528259277344
Epoch562 train_loss=52.24382781982422; test_loss=157.5751190185547
Epoch563 train_loss=103.26209259033203; test_loss=160.7227020263672
Epoch564 train_loss=297.9802551269531; test_loss=158.84996032714844
Epoch565 train_loss=93.34426879882812; test_loss=156.81515502929688
Epoch566 train_loss=185.40773010253906; test_loss=159.9794921875
Epoch567 train_loss=75.44111633300781; test_loss=161.95909118652344
Epoch568 train_loss=61.483612060546875; test_loss=158.5989990234375
Epoch569 train_loss=59.80012130737305; test_loss=157.86419677734375
Epoch570 train_loss=14.510164260864258; test_loss=160.091552734375
Epoch571 train_loss=352.4963073730469; test_loss=156.1521759033203
Epoch572 train_loss=95.80337524414062; test_loss=158.85504150390625
Epoch573 train_loss=425.52801513671875; test_loss=158.8076629638672
Epoch574 train_loss=45.73882293701172; test_loss=160.0908966064453
Epoch575 train_loss=82.82467651367188; test_loss=155.5208740234375
Epoch576 train_loss=8.320350646972656; test_loss=165.11703491210938
Epoch577 train_loss=70.08702087402344; test_loss=159.16065979003906
Epoch578 train_loss=73.88822937011719; test_loss=158.33619689941406
Epoch579 train_loss=184.88963317871094; test_loss=158.93505859375
Epoch580 train_loss=215.1313934326172; test_loss=158.27781677246094
Epoch581 train_loss=199.95111083984375; test_loss=159.60643005371094
Epoch582 train_loss=36.13941955566406; test_loss=157.15101623535156
Epoch583 train_loss=54.202667236328125; test_loss=159.3603515625
Epoch584 train_loss=54.60075378417969; test_loss=159.3018341064453
Epoch585 train_loss=143.7047576904297; test_loss=160.59017944335938
Epoch586 train_loss=27.379194259643555; test_loss=160.71043395996094
Epoch587 train_loss=387.8021240234375; test_loss=158.5909881591797
Epoch588 train_loss=62.065067291259766; test_loss=158.30152893066406
Epoch589 train_loss=60.856475830078125; test_loss=157.5819549560547
Epoch590 train_loss=43.625938415527344; test_loss=160.54318237304688
Epoch591 train_loss=82.2181396484375; test_loss=157.120849609375
Epoch592 train_loss=36.20049285888672; test_loss=163.27243041992188
Epoch593 train_loss=41.56233215332031; test_loss=161.28970336914062
Epoch594 train_loss=49.71647262573242; test_loss=159.006591796875
Epoch595 train_loss=13.232192993164062; test_loss=159.8789520263672
Epoch596 train_loss=80.52043151855469; test_loss=157.5912628173828
Epoch597 train_loss=94.76361083984375; test_loss=160.25363159179688
Epoch598 train_loss=49.999603271484375; test_loss=158.164306640625
Epoch599 train_loss=107.98670196533203; test_loss=161.5602264404297
Epoch600 train_loss=133.38052368164062; test_loss=158.1473846435547
Epoch601 train_loss=278.8830261230469; test_loss=159.23361206054688
Epoch602 train_loss=175.65887451171875; test_loss=161.60166931152344
Epoch603 train_loss=78.95587921142578; test_loss=161.19129943847656
Epoch604 train_loss=258.05029296875; test_loss=162.15835571289062
Epoch605 train_loss=384.7180480957031; test_loss=159.0907745361328
Epoch606 train_loss=113.35403442382812; test_loss=160.45375061035156
Epoch607 train_loss=240.71096801757812; test_loss=158.31661987304688
Epoch608 train_loss=81.73231506347656; test_loss=159.68557739257812
Epoch609 train_loss=403.9735107421875; test_loss=162.0383758544922
Epoch610 train_loss=20.20953369140625; test_loss=156.4530487060547
Epoch611 train_loss=104.12654113769531; test_loss=161.1194610595703
Epoch612 train_loss=12.8040771484375; test_loss=159.4859619140625
Epoch613 train_loss=107.87362670898438; test_loss=162.30308532714844
Epoch614 train_loss=103.01911163330078; test_loss=162.79876708984375
Epoch615 train_loss=32.52131652832031; test_loss=158.99217224121094
Epoch616 train_loss=115.14144134521484; test_loss=161.04847717285156
Epoch617 train_loss=148.32568359375; test_loss=162.00506591796875
Epoch618 train_loss=99.72334289550781; test_loss=159.16062927246094
Epoch619 train_loss=44.795654296875; test_loss=160.69615173339844
Epoch620 train_loss=103.90460968017578; test_loss=156.0545654296875
Epoch621 train_loss=369.5564270019531; test_loss=158.74681091308594
Epoch622 train_loss=39.69432067871094; test_loss=165.04006958007812
Epoch623 train_loss=111.44044494628906; test_loss=156.9287567138672
Epoch624 train_loss=158.58966064453125; test_loss=159.46444702148438
Epoch625 train_loss=238.53762817382812; test_loss=161.67596435546875
Epoch626 train_loss=75.73883056640625; test_loss=161.23141479492188
Epoch627 train_loss=84.53640747070312; test_loss=158.3931427001953
Epoch628 train_loss=124.0093002319336; test_loss=158.93502807617188
Epoch629 train_loss=181.034912109375; test_loss=161.30172729492188
Epoch630 train_loss=244.29306030273438; test_loss=159.15235900878906
Epoch631 train_loss=230.6396026611328; test_loss=160.50953674316406
Epoch632 train_loss=86.39418029785156; test_loss=163.6450653076172
Epoch633 train_loss=155.98260498046875; test_loss=165.10614013671875
Epoch634 train_loss=27.842453002929688; test_loss=161.62722778320312
Epoch635 train_loss=95.13692474365234; test_loss=159.42666625976562
Epoch636 train_loss=62.157867431640625; test_loss=159.798583984375
Epoch637 train_loss=85.82996368408203; test_loss=160.0763702392578
Epoch638 train_loss=107.65570068359375; test_loss=163.782470703125
Epoch639 train_loss=118.126220703125; test_loss=159.41217041015625
Epoch640 train_loss=58.7795295715332; test_loss=161.66915893554688
Epoch641 train_loss=46.216522216796875; test_loss=160.48435974121094
Epoch642 train_loss=48.82173156738281; test_loss=160.0013427734375
Epoch643 train_loss=48.849945068359375; test_loss=160.94261169433594
Epoch644 train_loss=86.59103393554688; test_loss=160.2552032470703
Epoch645 train_loss=123.02787017822266; test_loss=159.80935668945312
Epoch646 train_loss=96.84413146972656; test_loss=163.8826446533203
Epoch647 train_loss=205.36196899414062; test_loss=161.53424072265625
Epoch648 train_loss=65.33238983154297; test_loss=159.7039337158203
Epoch649 train_loss=24.55377960205078; test_loss=159.11203002929688
Epoch650 train_loss=83.63430786132812; test_loss=158.6905975341797
Epoch651 train_loss=116.31982421875; test_loss=161.3148193359375
Epoch652 train_loss=239.64683532714844; test_loss=159.74359130859375
Epoch653 train_loss=75.30238342285156; test_loss=158.0349578857422
Epoch654 train_loss=40.62220764160156; test_loss=159.19972229003906
Epoch655 train_loss=193.66357421875; test_loss=160.80117797851562
Epoch656 train_loss=108.54193115234375; test_loss=159.252685546875
Epoch657 train_loss=20.2637939453125; test_loss=162.5788116455078
Epoch658 train_loss=187.16648864746094; test_loss=157.70205688476562
Epoch659 train_loss=144.5233154296875; test_loss=160.42701721191406
Epoch660 train_loss=113.41656494140625; test_loss=160.68821716308594
Epoch661 train_loss=145.90916442871094; test_loss=159.8861846923828
Epoch662 train_loss=110.98820495605469; test_loss=161.9969482421875
Epoch663 train_loss=90.38832092285156; test_loss=159.15057373046875
Epoch664 train_loss=112.99493408203125; test_loss=157.5693359375
Epoch665 train_loss=60.93595504760742; test_loss=159.5548095703125
Epoch666 train_loss=100.48088073730469; test_loss=159.18780517578125
Epoch667 train_loss=42.72248077392578; test_loss=160.24400329589844
Epoch668 train_loss=232.2533721923828; test_loss=162.48773193359375
Epoch669 train_loss=102.36001586914062; test_loss=163.3543243408203
Epoch670 train_loss=89.37991333007812; test_loss=162.24215698242188
Epoch671 train_loss=238.38209533691406; test_loss=159.64146423339844
Epoch672 train_loss=67.14372253417969; test_loss=168.39923095703125
Epoch673 train_loss=135.4595184326172; test_loss=162.35792541503906
Epoch674 train_loss=51.96052932739258; test_loss=160.50918579101562
Epoch675 train_loss=42.21833801269531; test_loss=161.40614318847656
Epoch676 train_loss=50.700347900390625; test_loss=161.5101318359375
Epoch677 train_loss=51.01227569580078; test_loss=161.93804931640625
Epoch678 train_loss=109.48514556884766; test_loss=163.7764129638672
Epoch679 train_loss=131.42469787597656; test_loss=161.57733154296875
Epoch680 train_loss=57.29681396484375; test_loss=160.8414764404297
Epoch681 train_loss=107.48136901855469; test_loss=160.68785095214844
Epoch682 train_loss=150.71267700195312; test_loss=160.0620880126953
Epoch683 train_loss=46.55323028564453; test_loss=159.570556640625
Epoch684 train_loss=389.5161437988281; test_loss=160.48974609375
Epoch685 train_loss=100.2267074584961; test_loss=161.62991333007812
Epoch686 train_loss=169.7545928955078; test_loss=164.7905731201172
Epoch687 train_loss=103.82070922851562; test_loss=160.74671936035156
Epoch688 train_loss=88.17003631591797; test_loss=167.05929565429688
Epoch689 train_loss=50.286949157714844; test_loss=163.34884643554688
Epoch690 train_loss=88.57996368408203; test_loss=163.66470336914062
Epoch691 train_loss=213.45712280273438; test_loss=161.28475952148438
Epoch692 train_loss=20.727622985839844; test_loss=157.65440368652344
Epoch693 train_loss=54.908912658691406; test_loss=158.8999481201172
Epoch694 train_loss=49.272605895996094; test_loss=159.7720184326172
Epoch695 train_loss=74.83616638183594; test_loss=160.96975708007812
Epoch696 train_loss=53.20071029663086; test_loss=159.3464813232422
Epoch697 train_loss=48.39032745361328; test_loss=160.92471313476562
Epoch698 train_loss=140.40164184570312; test_loss=159.20449829101562
Epoch699 train_loss=61.81072998046875; test_loss=160.54591369628906
Epoch700 train_loss=209.85012817382812; test_loss=160.73757934570312
Epoch701 train_loss=37.10148620605469; test_loss=161.1822052001953
Epoch702 train_loss=132.4160919189453; test_loss=162.15292358398438
Epoch703 train_loss=149.66110229492188; test_loss=159.7886505126953
Epoch704 train_loss=259.6661682128906; test_loss=159.8008575439453
Epoch705 train_loss=40.850563049316406; test_loss=161.0608673095703
Epoch706 train_loss=404.30419921875; test_loss=160.02630615234375
Epoch707 train_loss=38.64503860473633; test_loss=159.0687713623047
Epoch708 train_loss=65.13631439208984; test_loss=158.3383331298828
Epoch709 train_loss=135.55157470703125; test_loss=159.9521484375
Epoch710 train_loss=61.12104797363281; test_loss=161.2499237060547
Epoch711 train_loss=129.0074462890625; test_loss=167.1533966064453
Epoch712 train_loss=38.3895149230957; test_loss=162.72799682617188
Epoch713 train_loss=86.79163360595703; test_loss=161.01885986328125
Epoch714 train_loss=384.34112548828125; test_loss=160.45953369140625
Epoch715 train_loss=343.46380615234375; test_loss=166.25042724609375
Epoch716 train_loss=32.68581008911133; test_loss=158.52041625976562
Epoch717 train_loss=158.44378662109375; test_loss=165.91416931152344
Epoch718 train_loss=110.226806640625; test_loss=162.27960205078125
Epoch719 train_loss=244.46934509277344; test_loss=165.34603881835938
Epoch720 train_loss=29.84454345703125; test_loss=160.6925811767578
Epoch721 train_loss=66.13783264160156; test_loss=158.75418090820312
Epoch722 train_loss=99.81741333007812; test_loss=160.4658966064453
Epoch723 train_loss=37.320587158203125; test_loss=161.0635986328125
Epoch724 train_loss=436.94842529296875; test_loss=159.7150421142578
Epoch725 train_loss=137.912109375; test_loss=162.82789611816406
Epoch726 train_loss=315.70733642578125; test_loss=158.52975463867188
Epoch727 train_loss=144.81808471679688; test_loss=161.18190002441406
Epoch728 train_loss=134.2421875; test_loss=162.7991180419922
Epoch729 train_loss=384.799072265625; test_loss=160.421875
Epoch730 train_loss=394.3551025390625; test_loss=162.89923095703125
Epoch731 train_loss=126.01132202148438; test_loss=172.28297424316406
Epoch732 train_loss=69.92373657226562; test_loss=165.18431091308594
Epoch733 train_loss=129.0947723388672; test_loss=160.1246795654297
Epoch734 train_loss=272.43695068359375; test_loss=159.81236267089844
Epoch735 train_loss=46.49626159667969; test_loss=166.9007568359375
Epoch736 train_loss=18.844070434570312; test_loss=160.53013610839844
Epoch737 train_loss=12.504467010498047; test_loss=161.0752716064453
Epoch738 train_loss=354.7116394042969; test_loss=161.8371124267578
Epoch739 train_loss=30.552196502685547; test_loss=160.7371368408203
Epoch740 train_loss=136.79273986816406; test_loss=162.5528106689453
Epoch741 train_loss=91.27163696289062; test_loss=160.41958618164062
Epoch742 train_loss=112.94252014160156; test_loss=161.32534790039062
Epoch743 train_loss=73.6462631225586; test_loss=161.02745056152344
Epoch744 train_loss=163.97036743164062; test_loss=161.59132385253906
Epoch745 train_loss=109.76889038085938; test_loss=160.44757080078125
Epoch746 train_loss=85.2767562866211; test_loss=161.21499633789062
Epoch747 train_loss=72.29331970214844; test_loss=163.05020141601562
Epoch748 train_loss=21.242929458618164; test_loss=160.5077362060547
Epoch749 train_loss=130.74734497070312; test_loss=164.73484802246094
Epoch750 train_loss=67.43273162841797; test_loss=162.2967529296875
Epoch751 train_loss=330.92083740234375; test_loss=159.95594787597656
Epoch752 train_loss=96.9501953125; test_loss=163.59266662597656
Epoch753 train_loss=49.85664367675781; test_loss=161.22579956054688
Epoch754 train_loss=117.1055908203125; test_loss=164.95925903320312
Epoch755 train_loss=63.898128509521484; test_loss=161.5965118408203
Epoch756 train_loss=117.88423919677734; test_loss=159.68165588378906
Epoch757 train_loss=49.44392395019531; test_loss=159.53240966796875
Epoch758 train_loss=63.58131408691406; test_loss=162.18128967285156
Epoch759 train_loss=105.92473602294922; test_loss=159.6190185546875
Epoch760 train_loss=92.831787109375; test_loss=163.5776824951172
Epoch761 train_loss=443.2356872558594; test_loss=160.51873779296875
Epoch762 train_loss=67.6109848022461; test_loss=160.83912658691406
Epoch763 train_loss=126.43057250976562; test_loss=164.08836364746094
Epoch764 train_loss=136.38186645507812; test_loss=163.96725463867188
Epoch765 train_loss=70.2791976928711; test_loss=163.03604125976562
Epoch766 train_loss=79.3222427368164; test_loss=161.3008575439453
Epoch767 train_loss=79.24044036865234; test_loss=160.55062866210938
Epoch768 train_loss=54.743045806884766; test_loss=162.62161254882812
Epoch769 train_loss=54.375396728515625; test_loss=164.63734436035156
Epoch770 train_loss=27.520214080810547; test_loss=161.54342651367188
Epoch771 train_loss=76.40496826171875; test_loss=163.80197143554688
Epoch772 train_loss=136.40737915039062; test_loss=158.78477478027344
Epoch773 train_loss=135.30148315429688; test_loss=162.4717559814453
Epoch774 train_loss=44.61865234375; test_loss=162.97789001464844
Epoch775 train_loss=96.79450225830078; test_loss=163.2512969970703
Epoch776 train_loss=264.20159912109375; test_loss=163.6277313232422
Epoch777 train_loss=254.00057983398438; test_loss=161.6923370361328
Epoch778 train_loss=88.52998352050781; test_loss=167.32891845703125
Epoch779 train_loss=166.28042602539062; test_loss=162.6763916015625
Epoch780 train_loss=129.09979248046875; test_loss=160.64097595214844
Epoch781 train_loss=52.248291015625; test_loss=161.06134033203125
Epoch782 train_loss=92.49449157714844; test_loss=161.36251831054688
Epoch783 train_loss=77.95048522949219; test_loss=166.21524047851562
Epoch784 train_loss=131.18408203125; test_loss=164.69534301757812
Epoch785 train_loss=74.59150695800781; test_loss=162.96954345703125
Epoch786 train_loss=38.891578674316406; test_loss=163.21981811523438
Epoch787 train_loss=263.0047302246094; test_loss=161.02435302734375
Epoch788 train_loss=138.06210327148438; test_loss=161.14231872558594
Epoch789 train_loss=56.66741180419922; test_loss=164.55699157714844
Epoch790 train_loss=23.09756088256836; test_loss=163.86614990234375
Epoch791 train_loss=84.18345642089844; test_loss=161.2508544921875
Epoch792 train_loss=185.76095581054688; test_loss=162.15621948242188
Epoch793 train_loss=69.65618896484375; test_loss=165.04248046875
Epoch794 train_loss=24.31280517578125; test_loss=160.37255859375
Epoch795 train_loss=20.774688720703125; test_loss=162.9391632080078
Epoch796 train_loss=121.51170349121094; test_loss=163.329345703125
Epoch797 train_loss=47.56474685668945; test_loss=165.85003662109375
Epoch798 train_loss=63.192596435546875; test_loss=163.55816650390625
Epoch799 train_loss=36.572532653808594; test_loss=161.27154541015625
Epoch800 train_loss=139.7332000732422; test_loss=162.21548461914062
Epoch801 train_loss=90.24751281738281; test_loss=162.60833740234375
Epoch802 train_loss=81.73529815673828; test_loss=170.91940307617188
Epoch803 train_loss=249.3759307861328; test_loss=161.90982055664062
Epoch804 train_loss=398.12957763671875; test_loss=161.86727905273438
Epoch805 train_loss=42.1158447265625; test_loss=161.76324462890625
Epoch806 train_loss=66.23724365234375; test_loss=161.83348083496094
Epoch807 train_loss=77.91555786132812; test_loss=164.37509155273438
Epoch808 train_loss=26.943527221679688; test_loss=162.0956268310547
Epoch809 train_loss=123.56669616699219; test_loss=165.6992645263672
Epoch810 train_loss=45.58039855957031; test_loss=163.5645294189453
Epoch811 train_loss=120.74514770507812; test_loss=171.93106079101562
Epoch812 train_loss=74.6563491821289; test_loss=166.33349609375
Epoch813 train_loss=51.04841995239258; test_loss=161.36473083496094
Epoch814 train_loss=107.85641479492188; test_loss=160.1649932861328
Epoch815 train_loss=233.1508026123047; test_loss=163.5208282470703
Epoch816 train_loss=24.100955963134766; test_loss=161.55197143554688
Epoch817 train_loss=55.633583068847656; test_loss=160.13018798828125
Epoch818 train_loss=224.949951171875; test_loss=165.03692626953125
Epoch819 train_loss=51.02318572998047; test_loss=160.9248809814453
Epoch820 train_loss=84.17124938964844; test_loss=160.65208435058594
Epoch821 train_loss=71.1436996459961; test_loss=163.45437622070312
Epoch822 train_loss=96.76828002929688; test_loss=168.96078491210938
Epoch823 train_loss=430.45086669921875; test_loss=161.73402404785156
Epoch824 train_loss=65.65291595458984; test_loss=163.6767578125
Epoch825 train_loss=132.88807678222656; test_loss=163.01654052734375
Epoch826 train_loss=15.556930541992188; test_loss=166.04095458984375
Epoch827 train_loss=41.78657531738281; test_loss=160.10296630859375
Epoch828 train_loss=132.62857055664062; test_loss=162.82293701171875
Epoch829 train_loss=284.7024230957031; test_loss=166.58680725097656
Epoch830 train_loss=109.63080596923828; test_loss=165.36251831054688
Epoch831 train_loss=41.88566207885742; test_loss=161.3252716064453
Epoch832 train_loss=317.83062744140625; test_loss=170.1806640625
Epoch833 train_loss=235.51272583007812; test_loss=160.59783935546875
Epoch834 train_loss=62.0770149230957; test_loss=166.95321655273438
Epoch835 train_loss=56.023040771484375; test_loss=163.93809509277344
Epoch836 train_loss=216.78744506835938; test_loss=162.2593231201172
Epoch837 train_loss=224.16319274902344; test_loss=163.3531494140625
Epoch838 train_loss=68.2847900390625; test_loss=165.20057678222656
Epoch839 train_loss=159.84222412109375; test_loss=160.69895935058594
Epoch840 train_loss=38.26762771606445; test_loss=161.59762573242188
Epoch841 train_loss=40.66462707519531; test_loss=167.89720153808594
Epoch842 train_loss=163.61338806152344; test_loss=163.40728759765625
Epoch843 train_loss=303.3103942871094; test_loss=164.2964630126953
Epoch844 train_loss=84.90702819824219; test_loss=162.47740173339844
Epoch845 train_loss=143.31027221679688; test_loss=161.4622039794922
Epoch846 train_loss=73.26042175292969; test_loss=166.0674591064453
Epoch847 train_loss=134.1834716796875; test_loss=166.73233032226562
Epoch848 train_loss=57.20661926269531; test_loss=162.31639099121094
Epoch849 train_loss=59.61842727661133; test_loss=163.49378967285156
Epoch850 train_loss=104.73474884033203; test_loss=161.24951171875
Epoch851 train_loss=46.07494354248047; test_loss=163.23231506347656
Epoch852 train_loss=47.477256774902344; test_loss=160.9785919189453
Epoch853 train_loss=98.21084594726562; test_loss=164.41940307617188
Epoch854 train_loss=157.62290954589844; test_loss=161.62843322753906
Epoch855 train_loss=38.998069763183594; test_loss=161.908447265625
Epoch856 train_loss=58.952537536621094; test_loss=162.03421020507812
Epoch857 train_loss=36.959571838378906; test_loss=163.58447265625
Epoch858 train_loss=29.58428955078125; test_loss=162.9874267578125
Epoch859 train_loss=59.1944580078125; test_loss=162.47525024414062
Epoch860 train_loss=93.28201293945312; test_loss=163.16661071777344
Epoch861 train_loss=28.598880767822266; test_loss=162.3208770751953
Epoch862 train_loss=64.63041687011719; test_loss=161.28468322753906
Epoch863 train_loss=69.143310546875; test_loss=163.10211181640625
Epoch864 train_loss=93.34854125976562; test_loss=165.67698669433594
Epoch865 train_loss=29.79953956604004; test_loss=166.1992645263672
Epoch866 train_loss=63.28025817871094; test_loss=160.5810546875
Epoch867 train_loss=13.931173324584961; test_loss=167.87222290039062
Epoch868 train_loss=25.14739990234375; test_loss=164.45175170898438
Epoch869 train_loss=52.34174346923828; test_loss=168.0074462890625
Epoch870 train_loss=83.09056854248047; test_loss=162.94110107421875
Epoch871 train_loss=162.16026306152344; test_loss=162.2620391845703
Epoch872 train_loss=129.26300048828125; test_loss=165.8019561767578
Epoch873 train_loss=57.79187774658203; test_loss=164.19993591308594
Epoch874 train_loss=64.95466613769531; test_loss=163.68080139160156
Epoch875 train_loss=141.027587890625; test_loss=162.75326538085938
Epoch876 train_loss=36.460968017578125; test_loss=161.0113525390625
Epoch877 train_loss=213.60205078125; test_loss=165.1531524658203
Epoch878 train_loss=151.63967895507812; test_loss=164.11976623535156
Epoch879 train_loss=65.077392578125; test_loss=164.91319274902344
Epoch880 train_loss=143.27069091796875; test_loss=160.90737915039062
Epoch881 train_loss=208.74603271484375; test_loss=163.45150756835938
Epoch882 train_loss=153.36866760253906; test_loss=163.41624450683594
Epoch883 train_loss=60.33961486816406; test_loss=162.71273803710938
Epoch884 train_loss=404.638916015625; test_loss=163.6589813232422
Epoch885 train_loss=50.07240295410156; test_loss=163.6611785888672
Epoch886 train_loss=26.80219078063965; test_loss=163.41163635253906
Epoch887 train_loss=71.7041244506836; test_loss=167.43649291992188
Epoch888 train_loss=34.07947540283203; test_loss=162.44094848632812
Epoch889 train_loss=29.380722045898438; test_loss=162.73300170898438
Epoch890 train_loss=229.87664794921875; test_loss=166.4732666015625
Epoch891 train_loss=72.24378967285156; test_loss=167.62303161621094
Epoch892 train_loss=99.66624450683594; test_loss=165.849853515625
Epoch893 train_loss=39.951011657714844; test_loss=163.6820526123047
Epoch894 train_loss=92.74038696289062; test_loss=162.34434509277344
Epoch895 train_loss=14.192474365234375; test_loss=160.0554656982422
Epoch896 train_loss=95.56563568115234; test_loss=168.70330810546875
Epoch897 train_loss=146.87039184570312; test_loss=162.15992736816406
Epoch898 train_loss=210.5002899169922; test_loss=164.32354736328125
Epoch899 train_loss=159.3033447265625; test_loss=166.09674072265625
Epoch900 train_loss=227.53329467773438; test_loss=166.564208984375
Epoch901 train_loss=128.5237274169922; test_loss=170.43319702148438
Epoch902 train_loss=96.734619140625; test_loss=164.74575805664062
Epoch903 train_loss=146.7286376953125; test_loss=163.61997985839844
Epoch904 train_loss=133.71975708007812; test_loss=163.71775817871094
Epoch905 train_loss=95.26795959472656; test_loss=164.93272399902344
Epoch906 train_loss=131.39578247070312; test_loss=165.95974731445312
Epoch907 train_loss=158.47604370117188; test_loss=165.35780334472656
Epoch908 train_loss=94.18156433105469; test_loss=161.96847534179688
Epoch909 train_loss=263.8731689453125; test_loss=162.2457275390625
Epoch910 train_loss=146.26800537109375; test_loss=164.38369750976562
Epoch911 train_loss=563.570068359375; test_loss=162.8902587890625
Epoch912 train_loss=60.38676452636719; test_loss=165.36227416992188
Epoch913 train_loss=35.53769302368164; test_loss=166.61453247070312
Epoch914 train_loss=64.91392517089844; test_loss=164.8280487060547
Epoch915 train_loss=301.81182861328125; test_loss=165.6776123046875
Epoch916 train_loss=125.91954040527344; test_loss=165.32240295410156
Epoch917 train_loss=144.7598114013672; test_loss=167.6660919189453
Epoch918 train_loss=127.85418701171875; test_loss=166.14247131347656
Epoch919 train_loss=83.03150177001953; test_loss=164.45358276367188
Epoch920 train_loss=166.53213500976562; test_loss=165.37841796875
Epoch921 train_loss=62.481876373291016; test_loss=163.1998748779297
Epoch922 train_loss=61.82310485839844; test_loss=165.28204345703125
Epoch923 train_loss=64.87937927246094; test_loss=164.3711395263672
Epoch924 train_loss=62.654449462890625; test_loss=163.3860626220703
Epoch925 train_loss=234.9334716796875; test_loss=162.18943786621094
Epoch926 train_loss=86.55197143554688; test_loss=161.58456420898438
Epoch927 train_loss=551.904296875; test_loss=161.76828002929688
Epoch928 train_loss=50.690284729003906; test_loss=161.97471618652344
Epoch929 train_loss=117.99625396728516; test_loss=163.2555389404297
Epoch930 train_loss=75.29363250732422; test_loss=162.83680725097656
Epoch931 train_loss=16.530624389648438; test_loss=162.9434356689453
Epoch932 train_loss=230.769775390625; test_loss=165.9027557373047
Epoch933 train_loss=22.88762664794922; test_loss=163.50152587890625
Epoch934 train_loss=322.85504150390625; test_loss=163.63436889648438
Epoch935 train_loss=57.207916259765625; test_loss=166.7723846435547
Epoch936 train_loss=21.315818786621094; test_loss=163.6780548095703
Epoch937 train_loss=118.77452087402344; test_loss=165.4739532470703
Epoch938 train_loss=108.8807373046875; test_loss=163.87521362304688
Epoch939 train_loss=64.38594055175781; test_loss=165.12098693847656
Epoch940 train_loss=71.81400299072266; test_loss=167.21983337402344
Epoch941 train_loss=59.77873992919922; test_loss=161.59909057617188
Epoch942 train_loss=105.4820556640625; test_loss=164.4986114501953
Epoch943 train_loss=194.98292541503906; test_loss=166.98580932617188
Epoch944 train_loss=69.77041625976562; test_loss=163.1902313232422
Epoch945 train_loss=159.27752685546875; test_loss=163.06373596191406
Epoch946 train_loss=23.28984832763672; test_loss=163.5952606201172
Epoch947 train_loss=212.15155029296875; test_loss=164.2005615234375
Epoch948 train_loss=92.61627960205078; test_loss=167.11912536621094
Epoch949 train_loss=112.253173828125; test_loss=163.81307983398438
Epoch950 train_loss=202.7069091796875; test_loss=168.36509704589844
Epoch951 train_loss=49.56542205810547; test_loss=169.12388610839844
Epoch952 train_loss=78.70474243164062; test_loss=164.4727020263672
Epoch953 train_loss=71.54519653320312; test_loss=165.7707977294922
Epoch954 train_loss=64.59182739257812; test_loss=164.8453826904297
Epoch955 train_loss=15.487140655517578; test_loss=167.6258087158203
Epoch956 train_loss=49.12738037109375; test_loss=165.81106567382812
Epoch957 train_loss=19.09221649169922; test_loss=166.909423828125
Epoch958 train_loss=38.197425842285156; test_loss=164.46583557128906
Epoch959 train_loss=81.46546173095703; test_loss=164.16949462890625
Epoch960 train_loss=52.68781280517578; test_loss=165.06497192382812
Epoch961 train_loss=177.7039794921875; test_loss=166.32958984375
Epoch962 train_loss=54.99650192260742; test_loss=167.57162475585938
Epoch963 train_loss=43.57887268066406; test_loss=162.99256896972656
Epoch964 train_loss=97.95683288574219; test_loss=168.46287536621094
Epoch965 train_loss=31.950729370117188; test_loss=168.35728454589844
Epoch966 train_loss=158.96873474121094; test_loss=166.5196533203125
Epoch967 train_loss=246.61489868164062; test_loss=165.54153442382812
Epoch968 train_loss=28.375988006591797; test_loss=169.4011993408203
Epoch969 train_loss=42.47595977783203; test_loss=168.10752868652344
Epoch970 train_loss=14.85223388671875; test_loss=168.38682556152344
Epoch971 train_loss=165.11264038085938; test_loss=165.58740234375
Epoch972 train_loss=57.379493713378906; test_loss=163.55355834960938
Epoch973 train_loss=82.20523071289062; test_loss=166.3671112060547
Epoch974 train_loss=95.60138702392578; test_loss=164.18667602539062
Epoch975 train_loss=98.06976318359375; test_loss=164.41268920898438
Epoch976 train_loss=174.70281982421875; test_loss=165.57997131347656
Epoch977 train_loss=78.60359191894531; test_loss=164.75694274902344
Epoch978 train_loss=98.73217010498047; test_loss=165.66705322265625
Epoch979 train_loss=106.4249267578125; test_loss=165.9139404296875
Epoch980 train_loss=52.92038345336914; test_loss=165.9442596435547
Epoch981 train_loss=102.73323822021484; test_loss=164.9256134033203
Epoch982 train_loss=40.756980895996094; test_loss=165.5846710205078
Epoch983 train_loss=80.56310272216797; test_loss=165.99034118652344
Epoch984 train_loss=185.11300659179688; test_loss=166.5141143798828
Epoch985 train_loss=58.40525817871094; test_loss=167.8382110595703
Epoch986 train_loss=27.75453758239746; test_loss=167.2163848876953
Epoch987 train_loss=80.20669555664062; test_loss=167.78871154785156
Epoch988 train_loss=24.18891716003418; test_loss=164.39613342285156
Epoch989 train_loss=75.42919158935547; test_loss=166.72317504882812
Epoch990 train_loss=218.1950225830078; test_loss=169.15379333496094
Epoch991 train_loss=38.50274658203125; test_loss=163.47743225097656
Epoch992 train_loss=41.77449035644531; test_loss=165.35475158691406
Epoch993 train_loss=100.34135437011719; test_loss=164.97283935546875
Epoch994 train_loss=124.09494018554688; test_loss=168.5535430908203
Epoch995 train_loss=199.25616455078125; test_loss=166.17813110351562
Epoch996 train_loss=83.5242691040039; test_loss=163.9271697998047
Epoch997 train_loss=78.98040771484375; test_loss=166.73098754882812
Epoch998 train_loss=50.99474334716797; test_loss=166.7332000732422
Epoch999 train_loss=163.26983642578125; test_loss=167.60643005371094
Epoch1000 train_loss=109.94534301757812; test_loss=166.21173095703125
------------------------------------------- TRAINING SCORES -------------------------------------------
Overall MAE: 127.04247235107422 +/- 99.90102627944705
Overall RMSE: 175.43406332778932 +/- 154.28729810850598
Overall MAPE: 15.875287336111068 +/- 19.4009852555714
Overall R2: 69.11674812436104 +/- 80.51793512312459
------------------------------------------- TESTING SCORES -------------------------------------------
Overall MAE: 162.0754659729004 +/- 7.1560650020547785
Overall RMSE: 266.23735757446286 +/- 8.60939057357635
Overall MAPE: 21.138779476165773 +/- 1.1536040792023885
Overall R2: 85.19760499596596 +/- 1.0113892271931464
Wall time: 11min 41s
%%time
# here are the best parameters for the DNN 45 vars SCALED hyperparameter search:
# lr: 0.0012782353276565707
# batch_size: 16
# n_layers: 5
# neurons_HL1: 638
# HL0_ac_fn: relu
# HL1_ac_fn: relu
# HL2_ac_fn: linear
# HL3_ac_fn: linear
# HL4_ac_fn: linear
# Sequential(
# (0): Linear(in_features=45, out_features=638, bias=True)
# (1): ReLU()
# (2): Linear(in_features=638, out_features=319, bias=True)
# (3): ReLU()
# (4): Linear(in_features=319, out_features=159, bias=True)
# (5): Linear(in_features=159, out_features=79, bias=True)
# (6): Linear(in_features=79, out_features=39, bias=True)
# (7): Linear(in_features=39, out_features=1, bias=True)
# )
# Metrics tracked by Keras during fit/evaluate for every fold:
# RMSE, MAPE and MAE (loss itself is MAE, set at compile time below).
metrics = [RootMeanSquaredError(), "mean_absolute_percentage_error",
           "mean_absolute_error"]
# Best hyperparameters from the Optuna search (see the comment block above).
lr = 0.0012782353276565707
batch_size = 16
num_epochs = 100
# 10-fold cross validator; fixed random_state keeps the splits reproducible
# across notebook runs.  (The original cell built num_folds/kfold twice with
# identical settings; a single definition is kept here.)
num_folds = 10
kfold = KFold(n_splits=num_folds, shuffle=True, random_state=1)
fold_num = 1  # running fold counter, incremented inside the CV loop
# Per-fold score accumulators — overall training and testing metrics.
MAPE_train = []
MAPE_scores = []
RMSE_train = []
RMSE_scores = []
MAE_train = []
MAE_scores = []
R2_train = []
R2_scores = []
# Volume-stratified test metrics: low (target 0-299), medium (300-750)
# and high (>= 751) volume subsets of each fold's test split.
low_MAPE_scores = []
low_RMSE_scores = []
low_MAE_scores = []
low_R2_scores = []
med_MAPE_scores = []
med_RMSE_scores = []
med_MAE_scores = []
med_R2_scores = []
high_MAPE_scores = []
high_RMSE_scores = []
high_MAE_scores = []
high_R2_scores = []
for train, test in kfold.split(minmax_X, Y):
# now our data is ready to go into our model.
model = Sequential([
Dense(638, activation='relu', input_shape=(minmax_X.shape[1],)),
Dense(319, activation='relu'),
Dense(159, activation='linear'),
Dense(79, activation='linear'),
Dense(39, activation='linear'),
Dense(1, activation='linear')
])
model.compile(loss='mae', optimizer=Adam(learning_rate=lr), metrics=metrics)
history = model.fit(minmax_X[train], Y[train], batch_size=16, epochs=100, verbose=False)
scores = model.evaluate(minmax_X[test], Y[test], verbose=False)
# we split the Y[test] into 3 inclusive volumes: low (0-299), medium (300-750), high (>= 751)
# here test and Y[test] are numpy ndarrays
# test has shape (675,), Y[test] has size around (675,45)
# Y[test][0] is the same as calling Y[test[0]] which has shape (45,)
# print("test.shape", test.shape)
# print(test)
# print("Y[test].shape",Y[test].shape)
# print(Y[test])
low_Y_test_mask = np.all([Y[test] >= 0, Y[test] <=299], axis=0)
med_Y_test_mask = np.all([Y[test] >= 300, Y[test] <=750], axis=0)
high_Y_test_mask = np.all([Y[test] >= 751],axis=0)
# print(low_Y_test_mask.shape)
# print(low_Y_test_mask)
# print("Y[test][low_Y_test_mask].shape",Y[test][low_Y_test_mask].shape)
# print(Y[test][low_Y_test_mask])
# print(med_Y_test_mask.shape)
# print(med_Y_test_mask)
# print("Y[test][med_Y_test_mask].shape",Y[test][med_Y_test_mask].shape)
# print(Y[test][med_Y_test_mask])
# print(high_Y_test_mask.shape)
# print(high_Y_test_mask)
# print("Y[test][high_Y_test_mask].shape",Y[test][high_Y_test_mask].shape)
# print(Y[test][high_Y_test_mask])
low_scores = model.evaluate(minmax_X[test][low_Y_test_mask], Y[test][low_Y_test_mask], verbose=False)
med_scores = model.evaluate(minmax_X[test][med_Y_test_mask], Y[test][med_Y_test_mask], verbose=False)
high_scores = model.evaluate(minmax_X[test][high_Y_test_mask], Y[test][high_Y_test_mask], verbose=False)
low_MAPE_scores.append(low_scores[2])
low_RMSE_scores.append(low_scores[1])
low_MAE_scores.append(low_scores[0])
low_R2_scores.append(r2_score(Y[test][low_Y_test_mask], model.predict(minmax_X[test][low_Y_test_mask])))
med_MAPE_scores.append(med_scores[2])
med_RMSE_scores.append(med_scores[1])
med_MAE_scores.append(med_scores[0])
med_R2_scores.append(r2_score(Y[test][med_Y_test_mask], model.predict(minmax_X[test][med_Y_test_mask])))
high_MAPE_scores.append(high_scores[2])
high_RMSE_scores.append(high_scores[1])
high_MAE_scores.append(high_scores[0])
high_R2_scores.append(r2_score(Y[test][high_Y_test_mask], model.predict(minmax_X[test][high_Y_test_mask])))
train_r2 = r2_score(Y[train], model.predict(minmax_X[train]))
R2_train.append(train_r2)
score_r2 = r2_score(Y[test], model.predict(minmax_X[test]))
R2_scores.append(score_r2)
MAPE_train.append(history.history.get(
'mean_absolute_percentage_error')[-1])
MAPE_scores.append(scores[2])
RMSE_train.append(history.history.get('root_mean_squared_error')[-1])
RMSE_scores.append(scores[1])
MAE_train.append(history.history.get('loss')[-1])
MAE_scores.append(scores[0])
print(
f"------------------------------------------Fold {fold_num}------------------------------------------")
print(
f"\tTrain Loss: {history.history.get('loss')[-1]}\tScore Loss: {scores[0]}")
print(
f"\tTrain RMSE: {history.history.get('root_mean_squared_error')[-1]}\tScore RMSE: {scores[1]}")
print(
f"\tTrain MAPE: {history.history.get('mean_absolute_percentage_error')[-1]}\tScore MAPE: {scores[2]}")
if scores[2] == 100.0:
print(history.history.get('mean_absolute_percentage_error'))
print(f"\tTrain R2: {train_r2}\tScore R2: {score_r2}")
print(f"\n\t3-VOLUME SCORES: LOW (n={Y[test][low_Y_test_mask].shape[0]}), MED (n={Y[test][med_Y_test_mask].shape[0]}), HIGH (n={Y[test][high_Y_test_mask].shape[0]}), TOTAL IN FOLD (n={test.shape[0]})")
print(f"\tLow Loss: {low_MAE_scores[-1]}\tMed Loss: {med_MAE_scores[-1]}\tHigh Loss: {high_MAE_scores[-1]}")
print(f"\tLow RMSE: {low_RMSE_scores[-1]}\tMed RMSE: {med_RMSE_scores[-1]}\tHigh RMSE: {high_RMSE_scores[-1]}")
print(f"\tLow MAPE: {low_MAPE_scores[-1]}\tMed MAPE: {med_MAPE_scores[-1]}\tHigh MAPE: {high_MAPE_scores[-1]}")
print(f"\tLow R2: {low_R2_scores[-1]}\tMed R2: {med_R2_scores[-1]}\tHigh R2: {high_R2_scores[-1]}")
fold_num += 1
print(f"\n\nLow MAE Average: {np.mean(low_MAE_scores)} +/- {np.std(low_MAE_scores)}")
print(f"Low RMSE Average: {np.mean(low_RMSE_scores)} +/- {np.std(low_RMSE_scores)}")
print(f"Low MAPE Average: {np.mean(low_MAPE_scores)} +/- {np.std(low_MAPE_scores)}")
print(f"Low R2 Average: {np.mean(low_R2_scores)*100} +/- {np.std(low_R2_scores)*100}\n")
print(f"\nMed MAE Average: {np.mean(med_MAE_scores)} +/- {np.std(med_MAE_scores)}")
print(f"Med RMSE Average: {np.mean(med_RMSE_scores)} +/- {np.std(med_RMSE_scores)}")
print(f"Med MAPE Average: {np.mean(med_MAPE_scores)} +/- {np.std(med_MAPE_scores)}")
print(f"Med R2 Average: {np.mean(med_R2_scores)*100} +/- {np.std(med_R2_scores)*100}\n")
print(f"\nHigh MAE Average: {np.mean(high_MAE_scores)} +/- {np.std(high_MAE_scores)}")
print(f"High RMSE Average: {np.mean(high_RMSE_scores)} +/- {np.std(high_RMSE_scores)}")
print(f"High MAPE Average: {np.mean(high_MAPE_scores)} +/- {np.std(high_MAPE_scores)}")
print(f"High R2 Average: {np.mean(high_R2_scores)*100} +/- {np.std(high_R2_scores)*100}\n")
print(f"\n\nOverall MAE: {np.mean(MAE_scores)} +/- {np.std(MAE_scores)}")
print(f"Overall RMSE: {np.mean(RMSE_scores)} +/- {np.std(RMSE_scores)}")
print(f"Overall MAPE: {np.mean(MAPE_scores)} +/- {np.std(MAPE_scores)}")
print(f"Overall R2: {np.mean(R2_scores)*100} +/- {np.std(R2_scores)*100}\n\n")
print("Delimited table:")
print("MAPE\tRMSE\tMAE\tR2")
for i in range(0, 10):
print("%.2f/%.2f\t%.2f/%.2f\t%.2f/%.2f\t%.2f/%.2f" %
(MAPE_train[i], MAPE_scores[i], RMSE_train[i], RMSE_scores[i], MAE_train[i], MAE_scores[i], R2_train[i], R2_scores[i]))
print("%.2f/%.2f\t%.2f/%.2f\t%.2f/%.2f\t%.2f/%.2f\t<===Averages" % (np.mean(MAPE_train), np.mean(MAPE_scores),
np.mean(RMSE_train), np.mean(RMSE_scores), np.mean(MAE_train), np.mean(MAE_scores), np.mean(R2_train), np.mean(R2_scores)))
------------------------------------------Fold 1------------------------------------------ Train Loss: 144.1322479248047 Score Loss: 162.7932586669922 Train RMSE: 247.11766052246094 Score RMSE: 262.96533203125 Train MAPE: 18.143918991088867 Score MAPE: 24.606538772583008 Train R2: 0.8818119686352033 Score R2: 0.8634852821224617 3-VOLUME SCORES: LOW (n=88), MED (n=178), HIGH (n=409), TOTAL IN FOLD (n=675) Low Loss: 89.98303985595703 Med Loss: 163.80369567871094 High Loss: 178.01931762695312 Low RMSE: 216.56761169433594 Med RMSE: 284.5936279296875 High RMSE: 262.2666931152344 Low MAPE: 66.7392349243164 Med MAPE: 30.451616287231445 High MAPE: 12.997488975524902 Low R2: -8.853872295057585 Med R2: -4.214074467369873 High R2: 0.7926740882118839 ------------------------------------------Fold 2------------------------------------------ Train Loss: 148.69894409179688 Score Loss: 141.33033752441406 Train RMSE: 255.9603729248047 Score RMSE: 220.44091796875 Train MAPE: 18.809551239013672 Score MAPE: 18.31383514404297 Train R2: 0.8797808154106651 Score R2: 0.8969344943422434 3-VOLUME SCORES: LOW (n=94), MED (n=166), HIGH (n=415), TOTAL IN FOLD (n=675) Low Loss: 37.79164505004883 Med Loss: 135.34317016601562 High Loss: 167.17733764648438 Low RMSE: 51.12791061401367 Med RMSE: 236.17117309570312 High RMSE: 236.93020629882812 Low MAPE: 29.029926300048828 Med MAPE: 27.327713012695312 High MAPE: 12.281028747558594 Low R2: 0.46026328711499187 Med R2: -2.2644953511233057 High R2: 0.8089711686846692 ------------------------------------------Fold 3------------------------------------------ Train Loss: 148.1610870361328 Score Loss: 153.02464294433594 Train RMSE: 251.3577880859375 Score RMSE: 268.42291259765625 Train MAPE: 18.67521858215332 Score MAPE: 21.069904327392578 Train R2: 0.8734606786342296 Score R2: 0.8650387544337905 3-VOLUME SCORES: LOW (n=100), MED (n=159), HIGH (n=416), TOTAL IN FOLD (n=675) Low Loss: 60.368621826171875 Med Loss: 160.4123992919922 High Loss: 
172.47402954101562 Low RMSE: 131.86077880859375 Med RMSE: 324.3493347167969 High RMSE: 269.2957763671875 Low MAPE: 40.568336486816406 Med MAPE: 32.326873779296875 High MAPE: 12.080241203308105 Low R2: -3.094724311931585 Med R2: -5.18790327388158 High R2: 0.7920780471648761 ------------------------------------------Fold 4------------------------------------------ Train Loss: 147.42608642578125 Score Loss: 156.72793579101562 Train RMSE: 254.05712890625 Score RMSE: 265.04168701171875 Train MAPE: 18.76193618774414 Score MAPE: 27.48409080505371 Train R2: 0.881158714723481 Score R2: 0.85485231207718 3-VOLUME SCORES: LOW (n=98), MED (n=164), HIGH (n=413), TOTAL IN FOLD (n=675) Low Loss: 69.82920837402344 Med Loss: 132.7421875 High Loss: 186.87261962890625 Low RMSE: 168.302978515625 Med RMSE: 270.81103515625 High RMSE: 281.01043701171875 Low MAPE: 89.77782440185547 Med MAPE: 25.209856033325195 High MAPE: 13.605618476867676 Low R2: -4.925661523706983 Med R2: -4.5105119565788705 High R2: 0.7324612176007474 ------------------------------------------Fold 5------------------------------------------ Train Loss: 146.00184631347656 Score Loss: 183.94680786132812 Train RMSE: 249.99966430664062 Score RMSE: 315.8504638671875 Train MAPE: 18.71318244934082 Score MAPE: 24.560184478759766 Train R2: 0.83841511145667 Score R2: 0.8067701873980474 3-VOLUME SCORES: LOW (n=107), MED (n=147), HIGH (n=421), TOTAL IN FOLD (n=675) Low Loss: 65.74545288085938 Med Loss: 152.2670135498047 High Loss: 225.05007934570312 Low RMSE: 168.07989501953125 Med RMSE: 312.6748352050781 High RMSE: 344.43194580078125 Low MAPE: 44.026649475097656 Med MAPE: 30.334226608276367 High MAPE: 17.59653663635254 Low R2: -5.383754918510242 Med R2: -4.58582957876006 High R2: 0.6518320507675018 ------------------------------------------Fold 6------------------------------------------ Train Loss: 149.6063690185547 Score Loss: 148.86585998535156 Train RMSE: 254.6150665283203 Score RMSE: 253.24661254882812 Train MAPE: 
18.894336700439453 Score MAPE: 19.360719680786133 Train R2: 0.8809863540297797 Score R2: 0.8631003042870936 3-VOLUME SCORES: LOW (n=101), MED (n=164), HIGH (n=410), TOTAL IN FOLD (n=675) Low Loss: 54.05596923828125 Med Loss: 123.51362609863281 High Loss: 182.3623504638672 Low RMSE: 105.52284240722656 Med RMSE: 257.20758056640625 High RMSE: 276.3709411621094 Low MAPE: 32.620849609375 Med MAPE: 24.946340560913086 High MAPE: 13.85995101928711 Low R2: -1.86400881803548 Med R2: -3.1276641691541816 High R2: 0.7554567709103881 ------------------------------------------Fold 7------------------------------------------ Train Loss: 146.719970703125 Score Loss: 163.76956176757812 Train RMSE: 249.390625 Score RMSE: 282.154052734375 Train MAPE: 18.77730941772461 Score MAPE: 20.990360260009766 Train R2: 0.8765210385698169 Score R2: 0.8410689900692148 3-VOLUME SCORES: LOW (n=87), MED (n=155), HIGH (n=432), TOTAL IN FOLD (n=674) Low Loss: 62.275367736816406 Med Loss: 200.96417236328125 High Loss: 170.8640594482422 Low RMSE: 128.5178985595703 Med RMSE: 394.1260070800781 High RMSE: 255.24046325683594 Low MAPE: 33.890342712402344 Med MAPE: 38.2117805480957 High MAPE: 12.213459014892578 Low R2: -2.7601473457991297 Med R2: -8.046361259580252 High R2: 0.7962877855831062 ------------------------------------------Fold 8------------------------------------------ Train Loss: 145.93109130859375 Score Loss: 153.470947265625 Train RMSE: 251.85423278808594 Score RMSE: 257.8077392578125 Train MAPE: 18.58513641357422 Score MAPE: 21.94877052307129 Train R2: 0.8733324076744613 Score R2: 0.859519104283986 3-VOLUME SCORES: LOW (n=111), MED (n=157), HIGH (n=406), TOTAL IN FOLD (n=674) Low Loss: 69.05165100097656 Med Loss: 116.05615234375 High Loss: 191.01937866210938 Low RMSE: 148.56698608398438 Med RMSE: 218.6107635498047 High RMSE: 292.9556579589844 Low MAPE: 46.56816864013672 Med MAPE: 22.56185531616211 High MAPE: 14.9807710647583 Low R2: -3.5294200554294664 Med R2: -2.3734597032453313 High R2: 
0.7291449006457036 ------------------------------------------Fold 9------------------------------------------ Train Loss: 147.30319213867188 Score Loss: 157.85643005371094 Train RMSE: 250.8709716796875 Score RMSE: 268.1253356933594 Train MAPE: 18.611648559570312 Score MAPE: 21.750507354736328 Train R2: 0.8745322063204004 Score R2: 0.8495096465432657 3-VOLUME SCORES: LOW (n=109), MED (n=176), HIGH (n=389), TOTAL IN FOLD (n=674) Low Loss: 65.2680435180664 Med Loss: 153.72299194335938 High Loss: 185.67034912109375 Low RMSE: 180.80435180664062 Med RMSE: 300.9421081542969 High RMSE: 272.8116455078125 Low MAPE: 36.173980712890625 Med MAPE: 31.060155868530273 High MAPE: 13.4968843460083 Low R2: -8.211648790053177 Med R2: -4.48159657408957 High R2: 0.7537647514025372 ------------------------------------------Fold 10------------------------------------------ Train Loss: 146.05517578125 Score Loss: 159.4729766845703 Train RMSE: 254.22254943847656 Score RMSE: 247.98599243164062 Train MAPE: 19.108728408813477 Score MAPE: 19.14328956604004 Train R2: 0.8675125958887148 Score R2: 0.8796039283779903 3-VOLUME SCORES: LOW (n=79), MED (n=160), HIGH (n=435), TOTAL IN FOLD (n=674) Low Loss: 69.8599624633789 Med Loss: 127.7369613647461 High Loss: 187.4205322265625 Low RMSE: 118.2153549194336 Med RMSE: 244.1698455810547 High RMSE: 266.11712646484375 Low MAPE: 38.8303108215332 Med MAPE: 25.241439819335938 High MAPE: 13.32494831085205 Low R2: -3.1224204467944334 Med R2: -2.907660835090062 High R2: 0.7775272026465918 Low MAE Average: 64.422896194458 +/- 12.544900866086037 Low RMSE Average: 141.7566608428955 +/- 43.496260833980855 Low MAPE Average: 45.82256240844727 +/- 17.724567115055887 Low R2 Average: -412.85395218203087 +/- 267.7857755437134 Med MAE Average: 146.6562370300293 +/- 23.802331583331284 Med RMSE Average: 284.3656311035156 +/- 48.89343068725408 Med MAPE Average: 28.767185783386232 +/- 4.389589156300765 Med R2 Average: -416.9955716887308 +/- 161.21001966761042 High MAE Average: 
184.69300537109376 +/- 15.42456979169979 High RMSE Average: 275.7430892944336 +/- 27.007039563831317 High MAPE Average: 13.643692779541016 +/- 1.5604006857707917 High R2 Average: 75.90197983618006 +/- 4.4248634966913105 Overall MAE: 158.1258758544922 +/- 10.675798049221576 Overall RMSE: 264.2041046142578 +/- 23.160702380613632 Overall MAPE: 21.92282009124756 +/- 2.7208087284754323 Overall R2: 85.79883003935272 +/- 2.2547113352745733 Delimited table: MAPE RMSE MAE R2 18.14/24.61 247.12/262.97 144.13/162.79 0.88/0.86 18.81/18.31 255.96/220.44 148.70/141.33 0.88/0.90 18.68/21.07 251.36/268.42 148.16/153.02 0.87/0.87 18.76/27.48 254.06/265.04 147.43/156.73 0.88/0.85 18.71/24.56 250.00/315.85 146.00/183.95 0.84/0.81 18.89/19.36 254.62/253.25 149.61/148.87 0.88/0.86 18.78/20.99 249.39/282.15 146.72/163.77 0.88/0.84 18.59/21.95 251.85/257.81 145.93/153.47 0.87/0.86 18.61/21.75 250.87/268.13 147.30/157.86 0.87/0.85 19.11/19.14 254.22/247.99 146.06/159.47 0.87/0.88 18.71/21.92 251.94/264.20 147.00/158.13 0.87/0.86 <===Averages Wall time: 5min 29s
# Study identifier and epoch budget for the PCA hyperparameter search.
pca_study_name = "PCA_25_all_hp_select_1st_layer_only_TPE_sampler"
# Reproducibility: seed every RNG the search touches (torch CPU, torch CUDA,
# numpy, and Python's random) with the same value, in the same order.
for _seed_fn in (torch.manual_seed, torch.cuda.manual_seed,
                 np.random.seed, random.seed):
    _seed_fn(42)
n_epochs = 100
# minibatch loop adapted from:
# src: https://stackoverflow.com/questions/45113245/how-to-get-mini-batches-in-pytorch-in-a-clean-and-efficient-way
def pca_objective_fn(trial):
    """Optuna objective: train a DNN on the 25-component PCA features and
    return the final-epoch test MAE.

    Searched hyperparameters: learning rate, batch size, number of hidden
    layers (2-5), width of the first hidden layer (each following layer is
    half as wide, floored at 2), and per-layer activation (relu vs linear).

    Relies on module-level tensors PCA_X_train / PCA_Y_train / PCA_X_test /
    PCA_Y_test and on n_epochs.
    """
    # Set up GPU if available.
    device = "cpu"
    if torch.cuda.is_available():
        device = "cuda:0"
    # Sample the training hyperparameters for this trial.
    lr = trial.suggest_float("lr", 1e-3, 1e-1, log=True)
    batch_size = trial.suggest_categorical("batch_size", [16, 32, 64, 128])
    n_layers = trial.suggest_int('n_layers', 2, 5)
    layers = []
    in_features = 25  # number of PCA components fed to the first layer
    out_features = 0
    # Only the first hidden layer's width is searched; the rest are derived.
    max_nrns = trial.suggest_int("neurons_HL1", 2, 1024, step=2)
    for i in range(n_layers):
        out_features = int(max_nrns)
        layers.append(torch.nn.Linear(in_features, out_features))
        activation = trial.suggest_categorical(f"HL{i}_ac_fn", ["relu", "linear"])
        if activation == "relu":
            layers.append(torch.nn.ReLU())
        # A 'linear' activation is the identity, i.e. no extra module at all.
        in_features = out_features
        # Halve the width for the next layer, but never below 2 — this
        # prevents the last layer from degenerating to Linear(0, 1).
        if max_nrns > 2:
            max_nrns = max_nrns / 2
    layers.append(torch.nn.Linear(out_features, 1))
    pca_model = torch.nn.Sequential(*layers).to(device)
    # Use MAE as the loss function (called L1Loss in torch).
    loss_fn = nn.L1Loss()
    optimizer = optim.Adam(pca_model.parameters(), lr=lr)
    for epoch in range(n_epochs):
        # Train for one epoch over shuffled minibatches.
        pca_model.train()
        permutation = torch.randperm(PCA_X_train.size()[0])
        for i in range(0, PCA_X_train.size()[0], batch_size):
            indices = permutation[i:i+batch_size]
            X_train_batch, Y_train_batch = PCA_X_train[indices], PCA_Y_train[indices]
            train_prediction = pca_model(X_train_batch.to(device))
            train_loss = loss_fn(train_prediction, Y_train_batch.to(device))
            optimizer.zero_grad()
            train_loss.backward()
            optimizer.step()
        # Evaluate on the held-out set. BUG FIX: wrap in no_grad() — the
        # original built an unused autograd graph for every test pass.
        pca_model.eval()
        with torch.no_grad():
            test_prediction = pca_model(PCA_X_test.to(device))
            test_loss = loss_fn(test_prediction, PCA_Y_test.to(device))
        # Report a plain Python float (Optuna expects a number, not a
        # tensor) and prune the trial if the pruner says so.
        trial.report(test_loss.item(), step=epoch)
        if trial.should_prune():
            raise optuna.TrialPruned()
    # Return the quantity being minimized: the final-epoch test MAE.
    return test_loss.item()
%%time
pca_study = optuna.create_study(sampler=optuna.samplers.TPESampler(seed=42),study_name=pca_study_name, direction='minimize')
pca_study.optimize(pca_objective_fn, n_trials=5000)
[I 2021-05-07 15:09:26,414] A new study created in memory with name: PCA_25_all_hp_select_1st_layer_only_TPE_sampler
[I 2021-05-07 15:10:03,635] Trial 0 finished with value: 168.740234375 and parameters: {'lr': 0.005611516415334507, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 60, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu'}. Best is trial 0 with value: 168.740234375.
[I 2021-05-07 15:10:49,637] Trial 1 finished with value: 175.97586059570312 and parameters: {'lr': 0.08706020878304858, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 538, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear'}. Best is trial 0 with value: 168.740234375.
[I 2021-05-07 15:11:26,524] Trial 2 finished with value: 166.1893768310547 and parameters: {'lr': 0.008168455894760165, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 624, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 2 with value: 166.1893768310547.
[I 2021-05-07 15:11:35,563] Trial 3 finished with value: 248.69874572753906 and parameters: {'lr': 0.041380401125610165, 'batch_size': 64, 'n_layers': 2, 'neurons_HL1': 508, 'HL0_ac_fn': 'linear', 'HL1_ac_fn': 'linear'}. Best is trial 2 with value: 166.1893768310547.
[I 2021-05-07 15:11:43,908] Trial 4 finished with value: 174.70448303222656 and parameters: {'lr': 0.004201672054372531, 'batch_size': 128, 'n_layers': 5, 'neurons_HL1': 964, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 2 with value: 166.1893768310547.
[I 2021-05-07 15:11:49,836] Trial 5 finished with value: 169.64419555664062 and parameters: {'lr': 0.005170191786366992, 'batch_size': 128, 'n_layers': 2, 'neurons_HL1': 1012, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 2 with value: 166.1893768310547.
[I 2021-05-07 15:11:51,379] Trial 6 pruned.
[I 2021-05-07 15:12:01,089] Trial 7 finished with value: 159.343994140625 and parameters: {'lr': 0.004470608546778492, 'batch_size': 64, 'n_layers': 2, 'neurons_HL1': 732, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu'}. Best is trial 7 with value: 159.343994140625.
[I 2021-05-07 15:12:01,609] Trial 8 pruned.
[I 2021-05-07 15:13:01,244] Trial 9 finished with value: 164.01841735839844 and parameters: {'lr': 0.0021010799310103557, 'batch_size': 16, 'n_layers': 5, 'neurons_HL1': 192, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear', 'HL4_ac_fn': 'linear'}. Best is trial 7 with value: 159.343994140625.
[I 2021-05-07 15:13:01,485] Trial 10 pruned.
[I 2021-05-07 15:13:01,647] Trial 11 pruned.
[I 2021-05-07 15:13:29,030] Trial 12 finished with value: 174.7391357421875 and parameters: {'lr': 0.0021694153593080755, 'batch_size': 32, 'n_layers': 4, 'neurons_HL1': 258, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'linear'}. Best is trial 7 with value: 159.343994140625.
[I 2021-05-07 15:13:29,179] Trial 13 pruned.
[I 2021-05-07 15:13:29,357] Trial 14 pruned.
[I 2021-05-07 15:13:29,848] Trial 15 pruned.
[I 2021-05-07 15:13:30,140] Trial 16 pruned.
[I 2021-05-07 15:13:30,216] Trial 17 pruned.
[I 2021-05-07 15:13:30,386] Trial 18 pruned.
[I 2021-05-07 15:13:30,955] Trial 19 pruned.
[I 2021-05-07 15:13:31,873] Trial 20 pruned.
[I 2021-05-07 15:13:32,621] Trial 21 pruned.
[I 2021-05-07 15:13:33,366] Trial 22 pruned.
[I 2021-05-07 15:13:34,156] Trial 23 pruned.
[I 2021-05-07 15:13:34,623] Trial 24 pruned.
[I 2021-05-07 15:13:37,351] Trial 25 pruned.
[I 2021-05-07 15:13:37,814] Trial 26 pruned.
[I 2021-05-07 15:13:37,874] Trial 27 pruned.
[I 2021-05-07 15:13:38,041] Trial 28 pruned.
[I 2021-05-07 15:13:38,802] Trial 29 pruned.
[I 2021-05-07 15:13:42,046] Trial 30 pruned.
[I 2021-05-07 15:13:44,249] Trial 31 pruned.
[I 2021-05-07 15:13:45,142] Trial 32 pruned.
[I 2021-05-07 15:13:45,517] Trial 33 pruned.
[I 2021-05-07 15:13:45,897] Trial 34 pruned.
[I 2021-05-07 15:13:46,643] Trial 35 pruned.
[I 2021-05-07 15:13:46,716] Trial 36 pruned.
[I 2021-05-07 15:13:49,138] Trial 37 pruned.
[I 2021-05-07 15:13:49,242] Trial 38 pruned.
[I 2021-05-07 15:13:49,740] Trial 39 pruned.
[I 2021-05-07 15:13:49,842] Trial 40 pruned.
[I 2021-05-07 15:13:49,913] Trial 41 pruned.
[I 2021-05-07 15:13:49,982] Trial 42 pruned.
[I 2021-05-07 15:13:50,047] Trial 43 pruned.
[I 2021-05-07 15:13:50,107] Trial 44 pruned.
[I 2021-05-07 15:13:50,204] Trial 45 pruned.
[I 2021-05-07 15:13:50,768] Trial 46 pruned.
[I 2021-05-07 15:14:03,013] Trial 47 finished with value: 165.33193969726562 and parameters: {'lr': 0.0034911623232774134, 'batch_size': 64, 'n_layers': 3, 'neurons_HL1': 896, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu'}. Best is trial 7 with value: 159.343994140625.
[I 2021-05-07 15:14:03,306] Trial 48 pruned.
[I 2021-05-07 15:14:03,440] Trial 49 pruned.
[I 2021-05-07 15:14:03,594] Trial 50 pruned.
[I 2021-05-07 15:14:03,826] Trial 51 pruned.
[I 2021-05-07 15:14:03,961] Trial 52 pruned.
[I 2021-05-07 15:14:04,332] Trial 53 pruned.
[I 2021-05-07 15:14:04,460] Trial 54 pruned.
[I 2021-05-07 15:14:04,653] Trial 55 pruned.
[I 2021-05-07 15:14:05,029] Trial 56 pruned.
[I 2021-05-07 15:14:05,540] Trial 57 pruned.
[I 2021-05-07 15:14:06,814] Trial 58 pruned.
[I 2021-05-07 15:14:06,872] Trial 59 pruned.
[I 2021-05-07 15:14:08,058] Trial 60 pruned.
[I 2021-05-07 15:14:08,156] Trial 61 pruned.
[I 2021-05-07 15:14:08,245] Trial 62 pruned.
[I 2021-05-07 15:14:08,341] Trial 63 pruned.
[I 2021-05-07 15:14:08,437] Trial 64 pruned.
[I 2021-05-07 15:14:09,052] Trial 65 pruned.
[I 2021-05-07 15:14:09,156] Trial 66 pruned.
[I 2021-05-07 15:14:09,702] Trial 67 pruned.
[I 2021-05-07 15:14:30,947] Trial 68 finished with value: 160.56297302246094 and parameters: {'lr': 0.0030323272643531874, 'batch_size': 32, 'n_layers': 2, 'neurons_HL1': 988, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu'}. Best is trial 7 with value: 159.343994140625.
[I 2021-05-07 15:14:31,146] Trial 69 pruned.
[I 2021-05-07 15:14:31,349] Trial 70 pruned.
[I 2021-05-07 15:14:31,781] Trial 71 pruned.
[I 2021-05-07 15:14:52,901] Trial 72 finished with value: 162.41238403320312 and parameters: {'lr': 0.0031802645450112474, 'batch_size': 32, 'n_layers': 2, 'neurons_HL1': 974, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu'}. Best is trial 7 with value: 159.343994140625.
[I 2021-05-07 15:14:53,130] Trial 73 pruned.
[I 2021-05-07 15:14:53,332] Trial 74 pruned.
[I 2021-05-07 15:14:53,735] Trial 75 pruned.
[I 2021-05-07 15:14:53,958] Trial 76 pruned.
[I 2021-05-07 15:14:54,156] Trial 77 pruned.
[I 2021-05-07 15:14:54,906] Trial 78 pruned.
[I 2021-05-07 15:14:55,122] Trial 79 pruned.
[I 2021-05-07 15:14:55,624] Trial 80 pruned.
[I 2021-05-07 15:14:55,699] Trial 81 pruned.
[I 2021-05-07 15:15:07,108] Trial 82 finished with value: 158.939208984375 and parameters: {'lr': 0.006408943445891679, 'batch_size': 64, 'n_layers': 2, 'neurons_HL1': 1012, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:15:07,238] Trial 83 pruned.
[I 2021-05-07 15:15:07,346] Trial 84 pruned.
[I 2021-05-07 15:15:07,472] Trial 85 pruned.
[I 2021-05-07 15:15:07,704] Trial 86 pruned.
[I 2021-05-07 15:15:07,808] Trial 87 pruned.
[I 2021-05-07 15:15:08,612] Trial 88 pruned.
[I 2021-05-07 15:15:20,458] Trial 89 finished with value: 165.49253845214844 and parameters: {'lr': 0.005635041222885405, 'batch_size': 64, 'n_layers': 3, 'neurons_HL1': 626, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:15:20,597] Trial 90 pruned.
[I 2021-05-07 15:15:20,854] Trial 91 pruned.
[I 2021-05-07 15:15:20,985] Trial 92 pruned.
[I 2021-05-07 15:15:21,095] Trial 93 pruned.
[I 2021-05-07 15:15:21,226] Trial 94 pruned.
[I 2021-05-07 15:15:21,594] Trial 95 pruned.
[I 2021-05-07 15:15:21,706] Trial 96 pruned.
[I 2021-05-07 15:15:21,904] Trial 97 pruned.
[I 2021-05-07 15:15:22,394] Trial 98 pruned.
[I 2021-05-07 15:15:32,044] Trial 99 finished with value: 164.01649475097656 and parameters: {'lr': 0.005834033401647169, 'batch_size': 64, 'n_layers': 2, 'neurons_HL1': 820, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:15:32,252] Trial 100 pruned.
[I 2021-05-07 15:15:32,365] Trial 101 pruned.
[I 2021-05-07 15:15:32,479] Trial 102 pruned.
[I 2021-05-07 15:15:32,586] Trial 103 pruned.
[I 2021-05-07 15:15:32,695] Trial 104 pruned.
[I 2021-05-07 15:15:33,105] Trial 105 pruned.
[I 2021-05-07 15:15:33,218] Trial 106 pruned.
[I 2021-05-07 15:15:33,447] Trial 107 pruned.
[I 2021-05-07 15:15:34,033] Trial 108 pruned.
[I 2021-05-07 15:15:34,141] Trial 109 pruned.
[I 2021-05-07 15:15:34,528] Trial 110 pruned.
[I 2021-05-07 15:15:34,633] Trial 111 pruned.
[I 2021-05-07 15:15:34,733] Trial 112 pruned.
[I 2021-05-07 15:15:34,805] Trial 113 pruned.
[I 2021-05-07 15:15:34,878] Trial 114 pruned.
[I 2021-05-07 15:15:35,735] Trial 115 pruned.
[I 2021-05-07 15:15:35,826] Trial 116 pruned.
[I 2021-05-07 15:15:35,970] Trial 117 pruned.
[I 2021-05-07 15:15:37,181] Trial 118 pruned.
[I 2021-05-07 15:15:37,378] Trial 119 pruned.
[I 2021-05-07 15:15:37,484] Trial 120 pruned.
[I 2021-05-07 15:15:37,762] Trial 121 pruned.
[I 2021-05-07 15:15:38,391] Trial 122 pruned.
[I 2021-05-07 15:15:38,706] Trial 123 pruned.
[I 2021-05-07 15:15:38,949] Trial 124 pruned.
[I 2021-05-07 15:15:39,033] Trial 125 pruned.
[I 2021-05-07 15:15:39,765] Trial 126 pruned.
[I 2021-05-07 15:15:40,706] Trial 127 pruned.
[I 2021-05-07 15:15:40,829] Trial 128 pruned.
[I 2021-05-07 15:15:40,960] Trial 129 pruned.
[I 2021-05-07 15:15:41,349] Trial 130 pruned.
[I 2021-05-07 15:15:41,817] Trial 131 pruned.
[I 2021-05-07 15:15:42,731] Trial 132 pruned.
[I 2021-05-07 15:15:43,206] Trial 133 pruned.
[I 2021-05-07 15:15:44,124] Trial 134 pruned.
[I 2021-05-07 15:15:44,404] Trial 135 pruned.
[I 2021-05-07 15:15:44,550] Trial 136 pruned.
[I 2021-05-07 15:15:45,356] Trial 137 pruned.
[I 2021-05-07 15:15:45,453] Trial 138 pruned.
[I 2021-05-07 15:15:45,583] Trial 139 pruned.
[I 2021-05-07 15:15:47,433] Trial 140 pruned.
[I 2021-05-07 15:15:47,537] Trial 141 pruned.
[I 2021-05-07 15:15:47,642] Trial 142 pruned.
[I 2021-05-07 15:15:47,746] Trial 143 pruned.
[I 2021-05-07 15:15:47,853] Trial 144 pruned.
[I 2021-05-07 15:15:47,964] Trial 145 pruned.
[I 2021-05-07 15:15:48,192] Trial 146 pruned.
[I 2021-05-07 15:15:48,258] Trial 147 pruned.
[I 2021-05-07 15:15:48,414] Trial 148 pruned.
[I 2021-05-07 15:15:49,052] Trial 149 pruned.
[I 2021-05-07 15:15:49,163] Trial 150 pruned.
[I 2021-05-07 15:15:49,274] Trial 151 pruned.
[I 2021-05-07 15:15:49,381] Trial 152 pruned.
[I 2021-05-07 15:15:49,507] Trial 153 pruned.
[I 2021-05-07 15:15:49,615] Trial 154 pruned.
[I 2021-05-07 15:15:49,803] Trial 155 pruned.
[I 2021-05-07 15:15:49,909] Trial 156 pruned.
[I 2021-05-07 15:15:49,970] Trial 157 pruned.
[I 2021-05-07 15:15:52,163] Trial 158 pruned.
[I 2021-05-07 15:15:52,267] Trial 159 pruned.
[I 2021-05-07 15:15:52,458] Trial 160 pruned.
[I 2021-05-07 15:15:52,844] Trial 161 pruned.
[I 2021-05-07 15:15:53,041] Trial 162 pruned.
[I 2021-05-07 15:15:53,242] Trial 163 pruned.
[I 2021-05-07 15:15:53,444] Trial 164 pruned.
[I 2021-05-07 15:15:53,913] Trial 165 pruned.
[I 2021-05-07 15:15:54,311] Trial 166 pruned.
[I 2021-05-07 15:15:54,417] Trial 167 pruned.
[I 2021-05-07 15:15:54,510] Trial 168 pruned.
[I 2021-05-07 15:15:54,902] Trial 169 pruned.
[I 2021-05-07 15:15:55,100] Trial 170 pruned.
[I 2021-05-07 15:15:55,255] Trial 171 pruned.
[I 2021-05-07 15:15:55,409] Trial 172 pruned.
[I 2021-05-07 15:15:55,563] Trial 173 pruned.
[I 2021-05-07 15:15:55,715] Trial 174 pruned.
[I 2021-05-07 15:15:55,859] Trial 175 pruned.
[I 2021-05-07 15:15:56,181] Trial 176 pruned.
[I 2021-05-07 15:15:56,302] Trial 177 pruned.
[I 2021-05-07 15:15:56,722] Trial 178 pruned.
[I 2021-05-07 15:15:56,824] Trial 179 pruned.
[I 2021-05-07 15:15:57,251] Trial 180 pruned.
[I 2021-05-07 15:15:57,466] Trial 181 pruned.
[I 2021-05-07 15:15:57,666] Trial 182 pruned.
[I 2021-05-07 15:15:57,871] Trial 183 pruned.
[I 2021-05-07 15:15:58,076] Trial 184 pruned.
[I 2021-05-07 15:15:58,184] Trial 185 pruned.
[I 2021-05-07 15:15:58,746] Trial 186 pruned.
[I 2021-05-07 15:15:58,852] Trial 187 pruned.
[I 2021-05-07 15:15:59,114] Trial 188 pruned.
[I 2021-05-07 15:15:59,233] Trial 189 pruned.
[I 2021-05-07 15:15:59,294] Trial 190 pruned.
[I 2021-05-07 15:15:59,694] Trial 191 pruned.
[I 2021-05-07 15:16:00,069] Trial 192 pruned.
[I 2021-05-07 15:16:00,813] Trial 193 pruned.
[I 2021-05-07 15:16:01,189] Trial 194 pruned.
[I 2021-05-07 15:16:01,563] Trial 195 pruned.
[I 2021-05-07 15:16:01,936] Trial 196 pruned.
[I 2021-05-07 15:16:02,155] Trial 197 pruned.
[I 2021-05-07 15:16:02,259] Trial 198 pruned.
[I 2021-05-07 15:16:03,000] Trial 199 pruned.
[I 2021-05-07 15:16:03,170] Trial 200 pruned.
[I 2021-05-07 15:16:04,007] Trial 201 pruned.
[I 2021-05-07 15:16:04,740] Trial 202 pruned.
[I 2021-05-07 15:16:06,607] Trial 203 pruned.
[I 2021-05-07 15:16:07,363] Trial 204 pruned.
[I 2021-05-07 15:16:45,746] Trial 205 finished with value: 162.0337677001953 and parameters: {'lr': 0.005330617762088191, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 704, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:16:46,536] Trial 206 pruned.
[I 2021-05-07 15:16:46,845] Trial 207 pruned.
[I 2021-05-07 15:16:46,956] Trial 208 pruned.
[I 2021-05-07 15:16:47,030] Trial 209 pruned.
[I 2021-05-07 15:16:47,146] Trial 210 pruned.
[I 2021-05-07 15:16:47,551] Trial 211 pruned.
[I 2021-05-07 15:16:47,949] Trial 212 pruned.
[I 2021-05-07 15:16:50,218] Trial 213 pruned.
[I 2021-05-07 15:16:50,622] Trial 214 pruned.
[I 2021-05-07 15:16:51,383] Trial 215 pruned.
[I 2021-05-07 15:16:51,619] Trial 216 pruned.
[I 2021-05-07 15:16:51,730] Trial 217 pruned.
[I 2021-05-07 15:16:52,107] Trial 218 pruned.
[I 2021-05-07 15:17:01,570] Trial 219 pruned.
[I 2021-05-07 15:17:02,036] Trial 220 pruned.
[I 2021-05-07 15:17:04,099] Trial 221 pruned.
[I 2021-05-07 15:17:04,297] Trial 222 pruned.
[I 2021-05-07 15:17:04,498] Trial 223 pruned.
[I 2021-05-07 15:17:04,889] Trial 224 pruned.
[I 2021-05-07 15:17:05,274] Trial 225 pruned.
[I 2021-05-07 15:17:05,477] Trial 226 pruned.
[I 2021-05-07 15:17:05,676] Trial 227 pruned.
[I 2021-05-07 15:17:05,877] Trial 228 pruned.
[I 2021-05-07 15:17:06,511] Trial 229 pruned.
[I 2021-05-07 15:17:06,624] Trial 230 pruned.
[I 2021-05-07 15:17:06,705] Trial 231 pruned.
[I 2021-05-07 15:17:07,092] Trial 232 pruned.
[I 2021-05-07 15:17:07,206] Trial 233 pruned.
[I 2021-05-07 15:17:07,588] Trial 234 pruned.
[I 2021-05-07 15:17:08,010] Trial 235 pruned.
[I 2021-05-07 15:17:08,301] Trial 236 pruned.
[I 2021-05-07 15:17:08,683] Trial 237 pruned.
[I 2021-05-07 15:17:08,921] Trial 238 pruned.
[I 2021-05-07 15:17:09,164] Trial 239 pruned.
[I 2021-05-07 15:17:09,241] Trial 240 pruned.
[I 2021-05-07 15:17:11,484] Trial 241 pruned.
[I 2021-05-07 15:17:12,244] Trial 242 pruned.
[I 2021-05-07 15:17:12,679] Trial 243 pruned.
[I 2021-05-07 15:17:13,062] Trial 244 pruned.
[I 2021-05-07 15:17:13,802] Trial 245 pruned.
[I 2021-05-07 15:17:14,171] Trial 246 pruned.
[I 2021-05-07 15:17:14,279] Trial 247 pruned.
[I 2021-05-07 15:17:16,478] Trial 248 pruned.
[I 2021-05-07 15:17:16,792] Trial 249 pruned.
[I 2021-05-07 15:17:16,906] Trial 250 pruned.
[I 2021-05-07 15:17:17,641] Trial 251 pruned.
[I 2021-05-07 15:17:18,125] Trial 252 pruned.
[I 2021-05-07 15:17:18,243] Trial 253 pruned.
[I 2021-05-07 15:17:18,322] Trial 254 pruned.
[I 2021-05-07 15:17:18,711] Trial 255 pruned.
[I 2021-05-07 15:17:18,864] Trial 256 pruned.
[I 2021-05-07 15:17:19,073] Trial 257 pruned.
[I 2021-05-07 15:17:19,664] Trial 258 pruned.
[I 2021-05-07 15:17:19,781] Trial 259 pruned.
[I 2021-05-07 15:17:19,977] Trial 260 pruned.
[I 2021-05-07 15:17:20,470] Trial 261 pruned.
[I 2021-05-07 15:17:20,586] Trial 262 pruned.
[I 2021-05-07 15:17:20,785] Trial 263 pruned.
[I 2021-05-07 15:17:20,849] Trial 264 pruned.
[I 2021-05-07 15:17:21,232] Trial 265 pruned.
[I 2021-05-07 15:17:21,395] Trial 266 pruned.
[I 2021-05-07 15:17:21,811] Trial 267 pruned.
[I 2021-05-07 15:17:22,232] Trial 268 pruned.
[I 2021-05-07 15:17:22,478] Trial 269 pruned.
[I 2021-05-07 15:17:23,741] Trial 270 pruned.
[I 2021-05-07 15:17:24,116] Trial 271 pruned.
[I 2021-05-07 15:17:24,228] Trial 272 pruned.
[I 2021-05-07 15:17:24,324] Trial 273 pruned.
[I 2021-05-07 15:17:24,517] Trial 274 pruned.
[I 2021-05-07 15:17:25,452] Trial 275 pruned.
[I 2021-05-07 15:17:25,558] Trial 276 pruned.
[I 2021-05-07 15:17:25,948] Trial 277 pruned.
[I 2021-05-07 15:17:26,228] Trial 278 pruned.
[I 2021-05-07 15:17:26,465] Trial 279 pruned.
[I 2021-05-07 15:17:26,891] Trial 280 pruned.
[I 2021-05-07 15:17:27,087] Trial 281 pruned.
[I 2021-05-07 15:17:27,199] Trial 282 pruned.
[I 2021-05-07 15:17:27,263] Trial 283 pruned.
[I 2021-05-07 15:17:28,229] Trial 284 pruned.
[I 2021-05-07 15:17:28,547] Trial 285 pruned.
[I 2021-05-07 15:17:28,661] Trial 286 pruned.
[I 2021-05-07 15:17:40,495] Trial 287 pruned.
[I 2021-05-07 15:17:40,623] Trial 288 pruned.
[I 2021-05-07 15:17:40,841] Trial 289 pruned.
[I 2021-05-07 15:17:41,670] Trial 290 pruned.
[I 2021-05-07 15:17:41,797] Trial 291 pruned.
[I 2021-05-07 15:17:42,321] Trial 292 pruned.
[I 2021-05-07 15:17:42,391] Trial 293 pruned.
[I 2021-05-07 15:17:42,918] Trial 294 pruned.
[I 2021-05-07 15:17:43,028] Trial 295 pruned.
[I 2021-05-07 15:17:43,217] Trial 296 pruned.
[I 2021-05-07 15:17:43,622] Trial 297 pruned.
[I 2021-05-07 15:17:43,732] Trial 298 pruned.
[I 2021-05-07 15:17:45,760] Trial 299 pruned.
[I 2021-05-07 15:17:46,082] Trial 300 pruned.
[I 2021-05-07 15:17:46,280] Trial 301 pruned.
[I 2021-05-07 15:17:46,661] Trial 302 pruned.
[I 2021-05-07 15:17:46,852] Trial 303 pruned.
[I 2021-05-07 15:17:47,316] Trial 304 pruned.
[I 2021-05-07 15:17:47,539] Trial 305 pruned.
[I 2021-05-07 15:17:47,694] Trial 306 pruned.
[I 2021-05-07 15:17:48,078] Trial 307 pruned.
[I 2021-05-07 15:17:48,144] Trial 308 pruned.
[I 2021-05-07 15:17:48,257] Trial 309 pruned.
[I 2021-05-07 15:17:48,506] Trial 310 pruned.
[I 2021-05-07 15:17:48,616] Trial 311 pruned.
[I 2021-05-07 15:17:49,045] Trial 312 pruned.
[I 2021-05-07 15:17:49,158] Trial 313 pruned.
[I 2021-05-07 15:17:49,484] Trial 314 pruned.
[I 2021-05-07 15:17:49,550] Trial 315 pruned.
[I 2021-05-07 15:17:49,659] Trial 316 pruned.
[I 2021-05-07 15:17:49,884] Trial 317 pruned.
[I 2021-05-07 15:17:50,022] Trial 318 pruned.
[I 2021-05-07 15:18:26,278] Trial 319 finished with value: 162.77919006347656 and parameters: {'lr': 0.005025246471120649, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 516, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:18:26,475] Trial 320 pruned.
[I 2021-05-07 15:18:27,082] Trial 321 pruned.
[I 2021-05-07 15:18:27,246] Trial 322 pruned.
[I 2021-05-07 15:18:29,259] Trial 323 pruned.
[I 2021-05-07 15:18:29,640] Trial 324 pruned.
[I 2021-05-07 15:18:29,705] Trial 325 pruned.
[I 2021-05-07 15:18:30,081] Trial 326 pruned.
[I 2021-05-07 15:18:30,189] Trial 327 pruned.
[I 2021-05-07 15:18:30,569] Trial 328 pruned.
[I 2021-05-07 15:18:32,759] Trial 329 pruned.
[I 2021-05-07 15:18:32,869] Trial 330 pruned.
[I 2021-05-07 15:18:33,109] Trial 331 pruned.
[I 2021-05-07 15:18:33,478] Trial 332 pruned.
[I 2021-05-07 15:18:33,609] Trial 333 pruned.
[I 2021-05-07 15:18:33,831] Trial 334 pruned.
[I 2021-05-07 15:18:33,933] Trial 335 pruned.
[I 2021-05-07 15:18:34,472] Trial 336 pruned.
[I 2021-05-07 15:18:34,587] Trial 337 pruned.
[I 2021-05-07 15:18:35,059] Trial 338 pruned.
[I 2021-05-07 15:18:35,194] Trial 339 pruned.
[I 2021-05-07 15:18:35,617] Trial 340 pruned.
[I 2021-05-07 15:18:36,008] Trial 341 pruned.
[I 2021-05-07 15:18:36,122] Trial 342 pruned.
[I 2021-05-07 15:18:36,555] Trial 343 pruned.
[I 2021-05-07 15:18:36,756] Trial 344 pruned.
[I 2021-05-07 15:18:36,847] Trial 345 pruned.
[I 2021-05-07 15:18:36,955] Trial 346 pruned.
[I 2021-05-07 15:18:37,872] Trial 347 pruned.
[I 2021-05-07 15:18:38,263] Trial 348 pruned.
[I 2021-05-07 15:18:38,376] Trial 349 pruned.
[I 2021-05-07 15:19:19,658] Trial 350 finished with value: 160.6999053955078 and parameters: {'lr': 0.006466495583985849, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 972, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:19:20,090] Trial 351 pruned.
[I 2021-05-07 15:19:20,512] Trial 352 pruned.
[I 2021-05-07 15:19:20,949] Trial 353 pruned.
[I 2021-05-07 15:19:21,379] Trial 354 pruned.
[I 2021-05-07 15:19:21,818] Trial 355 pruned.
[I 2021-05-07 15:19:22,219] Trial 356 pruned.
[I 2021-05-07 15:19:22,649] Trial 357 pruned.
[I 2021-05-07 15:19:22,855] Trial 358 pruned.
[I 2021-05-07 15:19:25,295] Trial 359 pruned.
[I 2021-05-07 15:19:25,494] Trial 360 pruned.
[I 2021-05-07 15:19:25,561] Trial 361 pruned.
[I 2021-05-07 15:19:26,409] Trial 362 pruned.
[I 2021-05-07 15:19:26,803] Trial 363 pruned.
[I 2021-05-07 15:19:27,181] Trial 364 pruned.
[I 2021-05-07 15:19:27,387] Trial 365 pruned.
[I 2021-05-07 15:19:27,816] Trial 366 pruned.
[I 2021-05-07 15:19:28,245] Trial 367 pruned.
[I 2021-05-07 15:19:28,313] Trial 368 pruned.
[I 2021-05-07 15:19:30,564] Trial 369 pruned.
[I 2021-05-07 15:19:31,191] Trial 370 pruned.
[I 2021-05-07 15:19:31,581] Trial 371 pruned.
[I 2021-05-07 15:19:49,925] Trial 372 finished with value: 164.38677978515625 and parameters: {'lr': 0.004762083477726626, 'batch_size': 32, 'n_layers': 2, 'neurons_HL1': 478, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:19:50,180] Trial 373 pruned.
[I 2021-05-07 15:19:50,556] Trial 374 pruned.
[I 2021-05-07 15:19:50,663] Trial 375 pruned.
[I 2021-05-07 15:19:50,734] Trial 376 pruned.
[I 2021-05-07 15:19:50,933] Trial 377 pruned.
[I 2021-05-07 15:19:51,701] Trial 378 pruned.
[I 2021-05-07 15:19:51,816] Trial 379 pruned.
[I 2021-05-07 15:19:52,068] Trial 380 pruned.
[I 2021-05-07 15:19:52,813] Trial 381 pruned.
[I 2021-05-07 15:19:52,949] Trial 382 pruned.
[I 2021-05-07 15:19:53,281] Trial 383 pruned.
[I 2021-05-07 15:19:54,057] Trial 384 pruned.
[I 2021-05-07 15:19:54,125] Trial 385 pruned.
[I 2021-05-07 15:19:54,263] Trial 386 pruned.
[I 2021-05-07 15:20:14,829] Trial 387 finished with value: 165.9886932373047 and parameters: {'lr': 0.004888958555459554, 'batch_size': 32, 'n_layers': 2, 'neurons_HL1': 1000, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:20:15,248] Trial 388 pruned.
[I 2021-05-07 15:20:15,489] Trial 389 pruned.
[I 2021-05-07 15:20:15,707] Trial 390 pruned.
[I 2021-05-07 15:20:16,142] Trial 391 pruned.
[I 2021-05-07 15:20:16,277] Trial 392 pruned.
[I 2021-05-07 15:20:16,503] Trial 393 pruned.
[I 2021-05-07 15:20:16,928] Trial 394 pruned.
[I 2021-05-07 15:20:17,010] Trial 395 pruned.
[I 2021-05-07 15:20:17,255] Trial 396 pruned.
[I 2021-05-07 15:20:17,447] Trial 397 pruned.
[I 2021-05-07 15:20:18,182] Trial 398 pruned.
[I 2021-05-07 15:20:18,405] Trial 399 pruned.
[I 2021-05-07 15:20:18,533] Trial 400 pruned.
[I 2021-05-07 15:20:18,932] Trial 401 pruned.
[I 2021-05-07 15:20:19,126] Trial 402 pruned.
[I 2021-05-07 15:20:19,260] Trial 403 pruned.
[I 2021-05-07 15:20:19,720] Trial 404 pruned.
[I 2021-05-07 15:20:19,814] Trial 405 pruned.
[I 2021-05-07 15:20:20,020] Trial 406 pruned.
[I 2021-05-07 15:20:20,206] Trial 407 pruned.
[I 2021-05-07 15:20:22,185] Trial 408 pruned.
[I 2021-05-07 15:20:22,582] Trial 409 pruned.
[I 2021-05-07 15:20:22,697] Trial 410 pruned.
[I 2021-05-07 15:20:23,123] Trial 411 pruned.
[I 2021-05-07 15:20:23,377] Trial 412 pruned.
[I 2021-05-07 15:20:24,118] Trial 413 pruned.
[I 2021-05-07 15:20:24,250] Trial 414 pruned.
[I 2021-05-07 15:20:24,329] Trial 415 pruned.
[I 2021-05-07 15:20:24,539] Trial 416 pruned.
[I 2021-05-07 15:20:24,922] Trial 417 pruned.
[I 2021-05-07 15:20:25,039] Trial 418 pruned.
[I 2021-05-07 15:20:25,260] Trial 419 pruned.
[I 2021-05-07 15:20:25,879] Trial 420 pruned.
[I 2021-05-07 15:20:26,078] Trial 421 pruned.
[I 2021-05-07 15:20:26,193] Trial 422 pruned.
[I 2021-05-07 15:20:26,645] Trial 423 pruned.
[I 2021-05-07 15:20:26,727] Trial 424 pruned.
[I 2021-05-07 15:20:26,927] Trial 425 pruned.
[I 2021-05-07 15:20:27,039] Trial 426 pruned.
[I 2021-05-07 15:20:29,464] Trial 427 pruned.
[I 2021-05-07 15:20:29,902] Trial 428 pruned.
[I 2021-05-07 15:20:30,010] Trial 429 pruned.
[I 2021-05-07 15:20:30,488] Trial 430 pruned.
[I 2021-05-07 15:20:30,690] Trial 431 pruned.
[I 2021-05-07 15:20:30,868] Trial 432 pruned.
[I 2021-05-07 15:20:31,595] Trial 433 pruned.
[I 2021-05-07 15:20:31,663] Trial 434 pruned.
[I 2021-05-07 15:20:32,051] Trial 435 pruned.
[I 2021-05-07 15:20:32,168] Trial 436 pruned.
[I 2021-05-07 15:20:33,009] Trial 437 pruned.
[I 2021-05-07 15:20:52,839] Trial 438 finished with value: 163.5986785888672 and parameters: {'lr': 0.0027198372205223128, 'batch_size': 32, 'n_layers': 2, 'neurons_HL1': 928, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:20:52,969] Trial 439 pruned.
[I 2021-05-07 15:20:53,468] Trial 440 pruned.
[I 2021-05-07 15:20:53,694] Trial 441 pruned.
[I 2021-05-07 15:20:54,107] Trial 442 pruned.
[I 2021-05-07 15:20:54,232] Trial 443 pruned.
[I 2021-05-07 15:20:54,453] Trial 444 pruned.
[I 2021-05-07 15:20:54,534] Trial 445 pruned.
[I 2021-05-07 15:20:54,952] Trial 446 pruned.
[I 2021-05-07 15:20:55,181] Trial 447 pruned.
[I 2021-05-07 15:20:55,361] Trial 448 pruned.
[I 2021-05-07 15:20:55,868] Trial 449 pruned.
[I 2021-05-07 15:20:56,091] Trial 450 pruned.
[I 2021-05-07 15:20:56,206] Trial 451 pruned.
[I 2021-05-07 15:21:32,938] Trial 452 finished with value: 164.49327087402344 and parameters: {'lr': 0.0026215203196377677, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 674, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:22:09,849] Trial 453 finished with value: 164.71060180664062 and parameters: {'lr': 0.0026014233318506367, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 680, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:22:10,956] Trial 454 pruned.
[I 2021-05-07 15:22:11,707] Trial 455 pruned.
[I 2021-05-07 15:22:12,095] Trial 456 pruned.
[I 2021-05-07 15:22:12,851] Trial 457 pruned.
[I 2021-05-07 15:22:13,235] Trial 458 pruned.
[I 2021-05-07 15:22:13,691] Trial 459 pruned.
[I 2021-05-07 15:22:14,074] Trial 460 pruned.
[I 2021-05-07 15:22:14,819] Trial 461 pruned.
[I 2021-05-07 15:22:15,207] Trial 462 pruned.
[I 2021-05-07 15:22:16,466] Trial 463 pruned.
[I 2021-05-07 15:22:16,862] Trial 464 pruned.
[I 2021-05-07 15:22:17,245] Trial 465 pruned.
[I 2021-05-07 15:22:17,634] Trial 466 pruned.
[I 2021-05-07 15:22:54,536] Trial 467 finished with value: 164.78338623046875 and parameters: {'lr': 0.0022097774539972283, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 718, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:22:55,002] Trial 468 pruned.
[I 2021-05-07 15:22:55,755] Trial 469 pruned.
[I 2021-05-07 15:22:56,521] Trial 470 pruned.
[I 2021-05-07 15:22:56,593] Trial 471 pruned.
[I 2021-05-07 15:22:57,365] Trial 472 pruned.
[I 2021-05-07 15:22:57,750] Trial 473 pruned.
[I 2021-05-07 15:22:58,382] Trial 474 pruned.
[I 2021-05-07 15:22:58,774] Trial 475 pruned.
[I 2021-05-07 15:22:58,860] Trial 476 pruned.
[I 2021-05-07 15:22:59,615] Trial 477 pruned.
[I 2021-05-07 15:23:00,787] Trial 478 pruned.
[I 2021-05-07 15:23:01,175] Trial 479 pruned.
[I 2021-05-07 15:23:01,273] Trial 480 pruned.
[I 2021-05-07 15:23:01,667] Trial 481 pruned.
[I 2021-05-07 15:23:02,065] Trial 482 pruned.
[I 2021-05-07 15:23:02,288] Trial 483 pruned.
[I 2021-05-07 15:23:03,216] Trial 484 pruned.
[I 2021-05-07 15:23:03,290] Trial 485 pruned.
[I 2021-05-07 15:23:03,494] Trial 486 pruned.
[I 2021-05-07 15:23:03,882] Trial 487 pruned.
[I 2021-05-07 15:23:03,999] Trial 488 pruned.
[I 2021-05-07 15:23:04,387] Trial 489 pruned.
[I 2021-05-07 15:23:04,609] Trial 490 pruned.
[I 2021-05-07 15:23:04,994] Trial 491 pruned.
[I 2021-05-07 15:23:05,175] Trial 492 pruned.
[I 2021-05-07 15:23:05,405] Trial 493 pruned.
[I 2021-05-07 15:23:05,824] Trial 494 pruned.
[I 2021-05-07 15:23:05,964] Trial 495 pruned.
[I 2021-05-07 15:23:06,037] Trial 496 pruned.
[I 2021-05-07 15:23:06,320] Trial 497 pruned.
[I 2021-05-07 15:23:06,748] Trial 498 pruned.
[I 2021-05-07 15:23:06,864] Trial 499 pruned.
[I 2021-05-07 15:23:07,273] Trial 500 pruned.
[I 2021-05-07 15:23:07,505] Trial 501 pruned.
[I 2021-05-07 15:23:07,897] Trial 502 pruned.
[I 2021-05-07 15:23:08,173] Trial 503 pruned.
[I 2021-05-07 15:23:08,289] Trial 504 pruned.
[I 2021-05-07 15:23:09,059] Trial 505 pruned.
[I 2021-05-07 15:23:11,502] Trial 506 pruned.
[I 2021-05-07 15:23:11,948] Trial 507 pruned.
[I 2021-05-07 15:23:12,064] Trial 508 pruned.
[I 2021-05-07 15:23:12,135] Trial 509 pruned.
[I 2021-05-07 15:23:12,977] Trial 510 pruned.
[I 2021-05-07 15:23:13,396] Trial 511 pruned.
[I 2021-05-07 15:23:13,544] Trial 512 pruned.
[I 2021-05-07 15:23:13,925] Trial 513 pruned.
[I 2021-05-07 15:23:14,153] Trial 514 pruned.
[I 2021-05-07 15:23:14,226] Trial 515 pruned.
[I 2021-05-07 15:23:14,793] Trial 516 pruned.
[I 2021-05-07 15:23:14,970] Trial 517 pruned.
[I 2021-05-07 15:23:15,357] Trial 518 pruned.
[I 2021-05-07 15:23:16,105] Trial 519 pruned.
[I 2021-05-07 15:23:16,216] Trial 520 pruned.
[I 2021-05-07 15:23:16,989] Trial 521 pruned.
[I 2021-05-07 15:23:17,241] Trial 522 pruned.
[I 2021-05-07 15:23:17,636] Trial 523 pruned.
[I 2021-05-07 15:23:17,758] Trial 524 pruned.
[I 2021-05-07 15:23:17,839] Trial 525 pruned.
[I 2021-05-07 15:23:18,067] Trial 526 pruned.
[I 2021-05-07 15:23:18,456] Trial 527 pruned.
[I 2021-05-07 15:23:18,572] Trial 528 pruned.
[I 2021-05-07 15:23:19,000] Trial 529 pruned.
[I 2021-05-07 15:23:19,209] Trial 530 pruned.
[I 2021-05-07 15:23:19,711] Trial 531 pruned.
[I 2021-05-07 15:23:19,921] Trial 532 pruned.
[I 2021-05-07 15:23:20,053] Trial 533 pruned.
[I 2021-05-07 15:23:20,680] Trial 534 pruned.
[I 2021-05-07 15:23:20,761] Trial 535 pruned.
[I 2021-05-07 15:23:41,445] Trial 536 finished with value: 165.1686553955078 and parameters: {'lr': 0.00259533430789289, 'batch_size': 32, 'n_layers': 2, 'neurons_HL1': 1014, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:23:41,890] Trial 537 pruned.
[I 2021-05-07 15:23:42,022] Trial 538 pruned.
[I 2021-05-07 15:23:42,328] Trial 539 pruned.
[I 2021-05-07 15:23:43,579] Trial 540 pruned.
[I 2021-05-07 15:23:43,711] Trial 541 pruned.
[I 2021-05-07 15:23:43,989] Trial 542 pruned.
[I 2021-05-07 15:23:44,369] Trial 543 pruned.
[I 2021-05-07 15:23:44,814] Trial 544 pruned.
[I 2021-05-07 15:23:44,896] Trial 545 pruned.
[I 2021-05-07 15:23:45,230] Trial 546 pruned.
[I 2021-05-07 15:23:45,344] Trial 547 pruned.
[I 2021-05-07 15:23:45,736] Trial 548 pruned.
[I 2021-05-07 15:23:46,013] Trial 549 pruned.
[I 2021-05-07 15:23:46,257] Trial 550 pruned.
[I 2021-05-07 15:23:46,676] Trial 551 pruned.
[I 2021-05-07 15:23:46,895] Trial 552 pruned.
[I 2021-05-07 15:23:47,666] Trial 553 pruned.
[I 2021-05-07 15:23:47,821] Trial 554 pruned.
[I 2021-05-07 15:23:47,905] Trial 555 pruned.
[I 2021-05-07 15:23:48,308] Trial 556 pruned.
[I 2021-05-07 15:23:48,538] Trial 557 pruned.
[I 2021-05-07 15:23:48,672] Trial 558 pruned.
[I 2021-05-07 15:23:49,312] Trial 559 pruned.
[I 2021-05-07 15:23:49,542] Trial 560 pruned.
[I 2021-05-07 15:23:50,161] Trial 561 pruned.
[I 2021-05-07 15:23:50,300] Trial 562 pruned.
[I 2021-05-07 15:23:50,695] Trial 563 pruned.
[I 2021-05-07 15:23:50,914] Trial 564 pruned.
[I 2021-05-07 15:23:50,985] Trial 565 pruned.
[I 2021-05-07 15:23:51,734] Trial 566 pruned.
[I 2021-05-07 15:23:51,856] Trial 567 pruned.
[I 2021-05-07 15:23:52,101] Trial 568 pruned.
[I 2021-05-07 15:23:52,532] Trial 569 pruned.
[I 2021-05-07 15:23:52,649] Trial 570 pruned.
[I 2021-05-07 15:23:53,224] Trial 571 pruned.
[I 2021-05-07 15:23:53,849] Trial 572 pruned.
[I 2021-05-07 15:23:54,281] Trial 573 pruned.
[I 2021-05-07 15:23:54,364] Trial 574 pruned.
[I 2021-05-07 15:23:54,809] Trial 575 pruned.
[I 2021-05-07 15:23:54,946] Trial 576 pruned.
[I 2021-05-07 15:23:55,696] Trial 577 pruned.
[I 2021-05-07 15:23:55,927] Trial 578 pruned.
[I 2021-05-07 15:23:56,045] Trial 579 pruned.
[I 2021-05-07 15:23:56,437] Trial 580 pruned.
[I 2021-05-07 15:23:56,661] Trial 581 pruned.
[I 2021-05-07 15:23:57,043] Trial 582 pruned.
[I 2021-05-07 15:23:57,190] Trial 583 pruned.
[I 2021-05-07 15:23:57,629] Trial 584 pruned.
[I 2021-05-07 15:23:57,701] Trial 585 pruned.
[I 2021-05-07 15:23:57,907] Trial 586 pruned.
[I 2021-05-07 15:23:58,091] Trial 587 pruned.
[I 2021-05-07 15:23:58,470] Trial 588 pruned.
[I 2021-05-07 15:23:58,702] Trial 589 pruned.
[I 2021-05-07 15:23:59,135] Trial 590 pruned.
[I 2021-05-07 15:23:59,256] Trial 591 pruned.
[I 2021-05-07 15:23:59,643] Trial 592 pruned.
[I 2021-05-07 15:23:59,880] Trial 593 pruned.
[I 2021-05-07 15:23:59,963] Trial 594 pruned.
[I 2021-05-07 15:24:00,353] Trial 595 pruned.
[I 2021-05-07 15:24:00,482] Trial 596 pruned.
[I 2021-05-07 15:24:00,776] Trial 597 pruned.
[I 2021-05-07 15:24:01,185] Trial 598 pruned.
[I 2021-05-07 15:24:01,316] Trial 599 pruned.
[I 2021-05-07 15:24:01,535] Trial 600 pruned.
[I 2021-05-07 15:24:01,905] Trial 601 pruned.
[I 2021-05-07 15:24:02,542] Trial 602 pruned.
[I 2021-05-07 15:24:02,627] Trial 603 pruned.
[I 2021-05-07 15:24:02,834] Trial 604 pruned.
[I 2021-05-07 15:24:02,968] Trial 605 pruned.
[I 2021-05-07 15:24:03,355] Trial 606 pruned.
[I 2021-05-07 15:24:03,612] Trial 607 pruned.
[I 2021-05-07 15:24:03,747] Trial 608 pruned.
[I 2021-05-07 15:24:04,510] Trial 609 pruned.
[I 2021-05-07 15:24:04,713] Trial 610 pruned.
[I 2021-05-07 15:24:05,354] Trial 611 pruned.
[I 2021-05-07 15:24:05,521] Trial 612 pruned.
[I 2021-05-07 15:24:05,594] Trial 613 pruned.
[I 2021-05-07 15:24:06,355] Trial 614 pruned.
[I 2021-05-07 15:24:07,554] Trial 615 pruned.
[I 2021-05-07 15:24:07,697] Trial 616 pruned.
[I 2021-05-07 15:24:08,515] Trial 617 pruned.
[I 2021-05-07 15:24:08,720] Trial 618 pruned.
[I 2021-05-07 15:24:09,236] Trial 619 pruned.
[I 2021-05-07 15:24:09,450] Trial 620 pruned.
[I 2021-05-07 15:24:10,186] Trial 621 pruned.
[I 2021-05-07 15:24:10,577] Trial 622 pruned.
[I 2021-05-07 15:24:10,661] Trial 623 pruned.
[I 2021-05-07 15:24:11,042] Trial 624 pruned.
[I 2021-05-07 15:24:11,158] Trial 625 pruned.
[I 2021-05-07 15:24:11,579] Trial 626 pruned.
[I 2021-05-07 15:24:12,207] Trial 627 pruned.
[I 2021-05-07 15:24:12,344] Trial 628 pruned.
[I 2021-05-07 15:24:12,558] Trial 629 pruned.
[I 2021-05-07 15:24:13,396] Trial 630 pruned.
[I 2021-05-07 15:24:14,142] Trial 631 pruned.
[I 2021-05-07 15:24:14,540] Trial 632 pruned.
[I 2021-05-07 15:24:14,628] Trial 633 pruned.
[I 2021-05-07 15:24:14,745] Trial 634 pruned.
[I 2021-05-07 15:24:15,128] Trial 635 pruned.
[I 2021-05-07 15:24:15,357] Trial 636 pruned.
[I 2021-05-07 15:24:15,485] Trial 637 pruned.
[I 2021-05-07 15:24:15,871] Trial 638 pruned.
[I 2021-05-07 15:24:16,342] Trial 639 pruned.
[I 2021-05-07 15:24:17,175] Trial 640 pruned.
[I 2021-05-07 15:24:17,342] Trial 641 pruned.
[I 2021-05-07 15:24:17,420] Trial 642 pruned.
[I 2021-05-07 15:24:18,068] Trial 643 pruned.
[I 2021-05-07 15:24:18,268] Trial 644 pruned.
[I 2021-05-07 15:24:18,385] Trial 645 pruned.
[I 2021-05-07 15:24:18,772] Trial 646 pruned.
[I 2021-05-07 15:24:19,005] Trial 647 pruned.
[I 2021-05-07 15:24:19,915] Trial 648 pruned.
[I 2021-05-07 15:24:20,050] Trial 649 pruned.
[I 2021-05-07 15:24:20,900] Trial 650 pruned.
[I 2021-05-07 15:24:21,128] Trial 651 pruned.
[I 2021-05-07 15:24:21,209] Trial 652 pruned.
[I 2021-05-07 15:24:21,862] Trial 653 pruned.
[I 2021-05-07 15:24:21,979] Trial 654 pruned.
[I 2021-05-07 15:24:22,459] Trial 655 pruned.
[I 2021-05-07 15:24:22,895] Trial 656 pruned.
[I 2021-05-07 15:24:23,024] Trial 657 pruned.
[I 2021-05-07 15:24:23,815] Trial 658 pruned.
[I 2021-05-07 15:24:24,591] Trial 659 pruned.
[I 2021-05-07 15:24:24,977] Trial 660 pruned.
[I 2021-05-07 15:24:25,410] Trial 661 pruned.
[I 2021-05-07 15:24:25,526] Trial 662 pruned.
[I 2021-05-07 15:24:25,613] Trial 663 pruned.
[I 2021-05-07 15:24:26,356] Trial 664 pruned.
[I 2021-05-07 15:24:26,558] Trial 665 pruned.
[I 2021-05-07 15:24:26,676] Trial 666 pruned.
[I 2021-05-07 15:24:27,160] Trial 667 pruned.
[I 2021-05-07 15:24:27,557] Trial 668 pruned.
[I 2021-05-07 15:24:27,990] Trial 669 pruned.
[I 2021-05-07 15:24:28,128] Trial 670 pruned.
[I 2021-05-07 15:24:28,210] Trial 671 pruned.
[I 2021-05-07 15:24:28,760] Trial 672 pruned.
[I 2021-05-07 15:24:29,093] Trial 673 pruned.
[I 2021-05-07 15:24:29,215] Trial 674 pruned.
[I 2021-05-07 15:24:29,689] Trial 675 pruned.
[I 2021-05-07 15:24:29,894] Trial 676 pruned.
[I 2021-05-07 15:24:32,955] Trial 677 pruned.
[I 2021-05-07 15:24:33,072] Trial 678 pruned.
[I 2021-05-07 15:24:34,357] Trial 679 pruned.
[I 2021-05-07 15:24:34,584] Trial 680 pruned.
[I 2021-05-07 15:24:34,660] Trial 681 pruned.
[I 2021-05-07 15:24:35,050] Trial 682 pruned.
[I 2021-05-07 15:24:35,185] Trial 683 pruned.
[I 2021-05-07 15:24:35,517] Trial 684 pruned.
[I 2021-05-07 15:24:36,031] Trial 685 pruned.
[I 2021-05-07 15:24:36,153] Trial 686 pruned.
[I 2021-05-07 15:24:36,543] Trial 687 pruned.
[I 2021-05-07 15:24:36,927] Trial 688 pruned.
[I 2021-05-07 15:24:37,363] Trial 689 pruned.
[I 2021-05-07 15:24:37,602] Trial 690 pruned.
[I 2021-05-07 15:24:37,736] Trial 691 pruned.
[I 2021-05-07 15:24:37,824] Trial 692 pruned.
[I 2021-05-07 15:24:38,208] Trial 693 pruned.
[I 2021-05-07 15:24:38,437] Trial 694 pruned.
[I 2021-05-07 15:24:38,562] Trial 695 pruned.
[I 2021-05-07 15:24:39,189] Trial 696 pruned.
[I 2021-05-07 15:24:39,403] Trial 697 pruned.
[I 2021-05-07 15:24:39,791] Trial 698 pruned.
[I 2021-05-07 15:24:39,961] Trial 699 pruned.
[I 2021-05-07 15:24:40,374] Trial 700 pruned.
[I 2021-05-07 15:24:40,460] Trial 701 pruned.
[I 2021-05-07 15:24:40,854] Trial 702 pruned.
[I 2021-05-07 15:24:40,995] Trial 703 pruned.
[I 2021-05-07 15:24:41,760] Trial 704 pruned.
[I 2021-05-07 15:24:41,968] Trial 705 pruned.
[I 2021-05-07 15:24:42,386] Trial 706 pruned.
[I 2021-05-07 15:24:42,505] Trial 707 pruned.
[I 2021-05-07 15:24:43,285] Trial 708 pruned.
[I 2021-05-07 15:24:43,513] Trial 709 pruned.
[I 2021-05-07 15:24:43,604] Trial 710 pruned.
[I 2021-05-07 15:24:44,027] Trial 711 pruned.
[I 2021-05-07 15:24:44,218] Trial 712 pruned.
[I 2021-05-07 15:24:44,665] Trial 713 pruned.
[I 2021-05-07 15:24:45,406] Trial 714 pruned.
[I 2021-05-07 15:24:45,532] Trial 715 pruned.
[I 2021-05-07 15:24:46,335] Trial 716 pruned.
[I 2021-05-07 15:24:46,770] Trial 717 pruned.
[I 2021-05-07 15:24:47,510] Trial 718 pruned.
[I 2021-05-07 15:24:47,721] Trial 719 pruned.
[I 2021-05-07 15:24:47,864] Trial 720 pruned.
[I 2021-05-07 15:24:47,938] Trial 721 pruned.
[I 2021-05-07 15:24:49,171] Trial 722 pruned.
[I 2021-05-07 15:24:49,821] Trial 723 pruned.
[I 2021-05-07 15:24:49,939] Trial 724 pruned.
[I 2021-05-07 15:24:50,674] Trial 725 pruned.
[I 2021-05-07 15:24:50,909] Trial 726 pruned.
[I 2021-05-07 15:24:51,649] Trial 727 pruned.
[I 2021-05-07 15:24:51,774] Trial 728 pruned.
[I 2021-05-07 15:25:36,252] Trial 729 finished with value: 164.67733764648438 and parameters: {'lr': 0.0023887056069721952, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 652, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:25:36,727] Trial 730 pruned.
[I 2021-05-07 15:25:37,192] Trial 731 pruned.
[I 2021-05-07 15:25:38,125] Trial 732 pruned.
[I 2021-05-07 15:25:38,595] Trial 733 pruned.
[I 2021-05-07 15:25:39,529] Trial 734 pruned.
[I 2021-05-07 15:25:41,773] Trial 735 pruned.
[I 2021-05-07 15:25:42,258] Trial 736 pruned.
[I 2021-05-07 15:25:42,729] Trial 737 pruned.
[I 2021-05-07 15:25:44,137] Trial 738 pruned.
[I 2021-05-07 15:25:45,050] Trial 739 pruned.
[I 2021-05-07 15:25:45,521] Trial 740 pruned.
[I 2021-05-07 15:25:46,001] Trial 741 pruned.
[I 2021-05-07 15:25:46,916] Trial 742 pruned.
[I 2021-05-07 15:25:47,404] Trial 743 pruned.
[I 2021-05-07 15:25:47,892] Trial 744 pruned.
[I 2021-05-07 15:25:47,981] Trial 745 pruned.
[I 2021-05-07 15:25:48,548] Trial 746 pruned.
[I 2021-05-07 15:25:48,808] Trial 747 pruned.
[I 2021-05-07 15:25:49,443] Trial 748 pruned.
[I 2021-05-07 15:25:49,728] Trial 749 pruned.
[I 2021-05-07 15:25:50,565] Trial 750 pruned.
[I 2021-05-07 15:25:50,643] Trial 751 pruned.
[I 2021-05-07 15:25:51,029] Trial 752 pruned.
[I 2021-05-07 15:25:51,253] Trial 753 pruned.
[I 2021-05-07 15:25:51,771] Trial 754 pruned.
[I 2021-05-07 15:25:52,103] Trial 755 pruned.
[I 2021-05-07 15:25:52,551] Trial 756 pruned.
[I 2021-05-07 15:26:29,100] Trial 757 finished with value: 165.62911987304688 and parameters: {'lr': 0.00270049573036226, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 592, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:26:29,485] Trial 758 pruned.
[I 2021-05-07 15:26:29,694] Trial 759 pruned.
[I 2021-05-07 15:26:29,771] Trial 760 pruned.
[I 2021-05-07 15:26:30,163] Trial 761 pruned.
[I 2021-05-07 15:26:30,373] Trial 762 pruned.
[I 2021-05-07 15:27:07,033] Trial 763 finished with value: 163.39097595214844 and parameters: {'lr': 0.0027864890458761346, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 600, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:27:07,426] Trial 764 pruned.
[I 2021-05-07 15:27:08,185] Trial 765 pruned.
[I 2021-05-07 15:27:08,942] Trial 766 pruned.
[I 2021-05-07 15:27:11,157] Trial 767 pruned.
[I 2021-05-07 15:27:11,548] Trial 768 pruned.
[I 2021-05-07 15:27:11,939] Trial 769 pruned.
[I 2021-05-07 15:27:13,060] Trial 770 pruned.
[I 2021-05-07 15:27:14,186] Trial 771 pruned.
[I 2021-05-07 15:27:14,577] Trial 772 pruned.
[I 2021-05-07 15:27:15,334] Trial 773 pruned.
[I 2021-05-07 15:27:16,086] Trial 774 pruned.
[I 2021-05-07 15:27:16,841] Trial 775 pruned.
[I 2021-05-07 15:27:17,231] Trial 776 pruned.
[I 2021-05-07 15:27:22,728] Trial 777 pruned.
[I 2021-05-07 15:27:23,118] Trial 778 pruned.
[I 2021-05-07 15:27:25,694] Trial 779 pruned.
[I 2021-05-07 15:27:25,771] Trial 780 pruned.
[I 2021-05-07 15:27:26,211] Trial 781 pruned.
[I 2021-05-07 15:27:26,646] Trial 782 pruned.
[I 2021-05-07 15:27:27,039] Trial 783 pruned.
[I 2021-05-07 15:27:27,427] Trial 784 pruned.
[I 2021-05-07 15:27:27,514] Trial 785 pruned.
[I 2021-05-07 15:27:27,921] Trial 786 pruned.
[I 2021-05-07 15:27:28,311] Trial 787 pruned.
[I 2021-05-07 15:27:28,697] Trial 788 pruned.
[I 2021-05-07 15:27:29,539] Trial 789 pruned.
[I 2021-05-07 15:27:30,291] Trial 790 pruned.
[I 2021-05-07 15:27:30,378] Trial 791 pruned.
[I 2021-05-07 15:27:31,183] Trial 792 pruned.
[I 2021-05-07 15:27:31,616] Trial 793 pruned.
[I 2021-05-07 15:27:32,034] Trial 794 pruned.
[I 2021-05-07 15:27:32,789] Trial 795 pruned.
[I 2021-05-07 15:27:32,864] Trial 796 pruned.
[I 2021-05-07 15:27:32,985] Trial 797 pruned.
[I 2021-05-07 15:27:33,421] Trial 798 pruned.
[I 2021-05-07 15:27:34,245] Trial 799 pruned.
[I 2021-05-07 15:27:34,641] Trial 800 pruned.
[I 2021-05-07 15:27:34,779] Trial 801 pruned.
[I 2021-05-07 15:27:35,546] Trial 802 pruned.
[I 2021-05-07 15:27:35,624] Trial 803 pruned.
[I 2021-05-07 15:27:36,051] Trial 804 pruned.
[I 2021-05-07 15:27:36,173] Trial 805 pruned.
[I 2021-05-07 15:27:36,572] Trial 806 pruned.
[I 2021-05-07 15:27:37,010] Trial 807 pruned.
[I 2021-05-07 15:27:37,132] Trial 808 pruned.
[I 2021-05-07 15:27:37,521] Trial 809 pruned.
[I 2021-05-07 15:27:37,756] Trial 810 pruned.
[I 2021-05-07 15:27:37,832] Trial 811 pruned.
[I 2021-05-07 15:27:39,038] Trial 812 pruned.
[I 2021-05-07 15:27:39,160] Trial 813 pruned.
[I 2021-05-07 15:27:39,918] Trial 814 pruned.
[I 2021-05-07 15:27:40,134] Trial 815 pruned.
[I 2021-05-07 15:27:41,265] Trial 816 pruned.
[I 2021-05-07 15:27:41,403] Trial 817 pruned.
[I 2021-05-07 15:27:42,160] Trial 818 pruned.
[I 2021-05-07 15:27:42,374] Trial 819 pruned.
[I 2021-05-07 15:27:43,138] Trial 820 pruned.
[I 2021-05-07 15:27:43,229] Trial 821 pruned.
[I 2021-05-07 15:27:43,374] Trial 822 pruned.
[I 2021-05-07 15:27:44,010] Trial 823 pruned.
[I 2021-05-07 15:27:44,788] Trial 824 pruned.
[I 2021-05-07 15:27:45,632] Trial 825 pruned.
[I 2021-05-07 15:27:46,026] Trial 826 pruned.
[I 2021-05-07 15:27:46,159] Trial 827 pruned.
[I 2021-05-07 15:27:47,021] Trial 828 pruned.
[I 2021-05-07 15:27:47,423] Trial 829 pruned.
[I 2021-05-07 15:27:47,634] Trial 830 pruned.
[I 2021-05-07 15:27:47,723] Trial 831 pruned.
[I 2021-05-07 15:27:48,128] Trial 832 pruned.
[I 2021-05-07 15:27:48,250] Trial 833 pruned.
[I 2021-05-07 15:27:48,681] Trial 834 pruned.
[I 2021-05-07 15:27:48,896] Trial 835 pruned.
[I 2021-05-07 15:27:49,331] Trial 836 pruned.
[I 2021-05-07 15:27:49,472] Trial 837 pruned.
[I 2021-05-07 15:27:50,239] Trial 838 pruned.
[I 2021-05-07 15:27:50,480] Trial 839 pruned.
[I 2021-05-07 15:27:50,885] Trial 840 pruned.
[I 2021-05-07 15:27:50,979] Trial 841 pruned.
[I 2021-05-07 15:27:51,128] Trial 842 pruned.
[I 2021-05-07 15:27:51,522] Trial 843 pruned.
[I 2021-05-07 15:27:51,737] Trial 844 pruned.
[I 2021-05-07 15:27:52,521] Trial 845 pruned.
[I 2021-05-07 15:27:52,644] Trial 846 pruned.
[I 2021-05-07 15:28:32,324] Trial 847 finished with value: 166.2036590576172 and parameters: {'lr': 0.002345873974422654, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 930, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:28:33,133] Trial 848 pruned.
[I 2021-05-07 15:28:33,934] Trial 849 pruned.
[I 2021-05-07 15:28:34,350] Trial 850 pruned.
[I 2021-05-07 15:28:34,758] Trial 851 pruned.
[I 2021-05-07 15:28:35,573] Trial 852 pruned.
[I 2021-05-07 15:28:35,959] Trial 853 pruned.
[I 2021-05-07 15:28:37,163] Trial 854 pruned.
[I 2021-05-07 15:28:37,974] Trial 855 pruned.
[I 2021-05-07 15:28:38,386] Trial 856 pruned.
[I 2021-05-07 15:28:38,775] Trial 857 pruned.
[I 2021-05-07 15:28:39,164] Trial 858 pruned.
[I 2021-05-07 15:28:39,934] Trial 859 pruned.
[I 2021-05-07 15:28:40,320] Trial 860 pruned.
[I 2021-05-07 15:28:40,711] Trial 861 pruned.
[I 2021-05-07 15:28:41,134] Trial 862 pruned.
[I 2021-05-07 15:28:41,346] Trial 863 pruned.
[I 2021-05-07 15:28:41,736] Trial 864 pruned.
[I 2021-05-07 15:28:42,492] Trial 865 pruned.
[I 2021-05-07 15:28:42,630] Trial 866 pruned.
[I 2021-05-07 15:28:42,843] Trial 867 pruned.
[I 2021-05-07 15:28:43,231] Trial 868 pruned.
[I 2021-05-07 15:28:43,986] Trial 869 pruned.
[I 2021-05-07 15:28:44,198] Trial 870 pruned.
[I 2021-05-07 15:28:44,331] Trial 871 pruned.
[I 2021-05-07 15:28:45,094] Trial 872 pruned.
[I 2021-05-07 15:28:47,304] Trial 873 pruned.
[I 2021-05-07 15:28:48,436] Trial 874 pruned.
[I 2021-05-07 15:28:48,578] Trial 875 pruned.
[I 2021-05-07 15:28:49,360] Trial 876 pruned.
[I 2021-05-07 15:29:30,584] Trial 877 finished with value: 162.35157775878906 and parameters: {'lr': 0.006801332160217757, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 1024, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:29:31,339] Trial 878 pruned.
[I 2021-05-07 15:29:31,733] Trial 879 pruned.
[I 2021-05-07 15:29:32,172] Trial 880 pruned.
[I 2021-05-07 15:29:32,926] Trial 881 pruned.
[I 2021-05-07 15:29:33,311] Trial 882 pruned.
[I 2021-05-07 15:29:33,708] Trial 883 pruned.
[I 2021-05-07 15:29:34,147] Trial 884 pruned.
[I 2021-05-07 15:29:34,569] Trial 885 pruned.
[I 2021-05-07 15:29:34,998] Trial 886 pruned.
[I 2021-05-07 15:29:35,436] Trial 887 pruned.
[I 2021-05-07 15:29:36,236] Trial 888 pruned.
[I 2021-05-07 15:29:36,625] Trial 889 pruned.
[I 2021-05-07 15:30:13,746] Trial 890 finished with value: 162.57220458984375 and parameters: {'lr': 0.0020748327612825695, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 646, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:30:14,516] Trial 891 pruned.
[I 2021-05-07 15:30:14,915] Trial 892 pruned.
[I 2021-05-07 15:30:16,053] Trial 893 pruned.
[I 2021-05-07 15:30:54,395] Trial 894 finished with value: 165.51539611816406 and parameters: {'lr': 0.002177929282592793, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 628, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:30:54,604] Trial 895 pruned.
[I 2021-05-07 15:30:54,995] Trial 896 pruned.
[I 2021-05-07 15:30:55,115] Trial 897 pruned.
[I 2021-05-07 15:30:55,507] Trial 898 pruned.
[I 2021-05-07 15:30:55,716] Trial 899 pruned.
[I 2021-05-07 15:30:56,477] Trial 900 pruned.
[I 2021-05-07 15:30:56,603] Trial 901 pruned.
[I 2021-05-07 15:30:56,992] Trial 902 pruned.
[I 2021-05-07 15:30:57,203] Trial 903 pruned.
[I 2021-05-07 15:30:57,597] Trial 904 pruned.
[I 2021-05-07 15:30:57,719] Trial 905 pruned.
[I 2021-05-07 15:30:58,475] Trial 906 pruned.
[I 2021-05-07 15:30:58,684] Trial 907 pruned.
[I 2021-05-07 15:30:59,442] Trial 908 pruned.
[I 2021-05-07 15:30:59,832] Trial 909 pruned.
[I 2021-05-07 15:30:59,954] Trial 910 pruned.
[I 2021-05-07 15:31:00,165] Trial 911 pruned.
[I 2021-05-07 15:31:00,567] Trial 912 pruned.
[I 2021-05-07 15:31:01,320] Trial 913 pruned.
[I 2021-05-07 15:31:01,530] Trial 914 pruned.
[I 2021-05-07 15:31:01,654] Trial 915 pruned.
[I 2021-05-07 15:31:02,083] Trial 916 pruned.
[I 2021-05-07 15:31:02,853] Trial 917 pruned.
[I 2021-05-07 15:31:03,070] Trial 918 pruned.
[I 2021-05-07 15:31:03,465] Trial 919 pruned.
[I 2021-05-07 15:31:03,589] Trial 920 pruned.
[I 2021-05-07 15:31:04,363] Trial 921 pruned.
[I 2021-05-07 15:31:04,581] Trial 922 pruned.
[I 2021-05-07 15:31:05,348] Trial 923 pruned.
[I 2021-05-07 15:31:05,475] Trial 924 pruned.
[I 2021-05-07 15:31:42,500] Trial 925 finished with value: 166.0503387451172 and parameters: {'lr': 0.002348791940286377, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 660, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:31:42,893] Trial 926 pruned.
[I 2021-05-07 15:31:43,646] Trial 927 pruned.
[I 2021-05-07 15:31:43,860] Trial 928 pruned.
[I 2021-05-07 15:31:44,252] Trial 929 pruned.
[I 2021-05-07 15:31:44,377] Trial 930 pruned.
[I 2021-05-07 15:31:44,771] Trial 931 pruned.
[I 2021-05-07 15:31:44,983] Trial 932 pruned.
[I 2021-05-07 15:32:21,930] Trial 933 finished with value: 162.64761352539062 and parameters: {'lr': 0.002540456328980289, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 656, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:32:22,333] Trial 934 pruned.
[I 2021-05-07 15:32:22,457] Trial 935 pruned.
[I 2021-05-07 15:32:22,673] Trial 936 pruned.
[I 2021-05-07 15:32:23,093] Trial 937 pruned.
[I 2021-05-07 15:32:23,515] Trial 938 pruned.
[I 2021-05-07 15:32:23,650] Trial 939 pruned.
[I 2021-05-07 15:32:23,881] Trial 940 pruned.
[I 2021-05-07 15:32:24,390] Trial 941 pruned.
[I 2021-05-07 15:32:24,881] Trial 942 pruned.
[I 2021-05-07 15:32:25,115] Trial 943 pruned.
[I 2021-05-07 15:32:25,519] Trial 944 pruned.
[I 2021-05-07 15:32:25,643] Trial 945 pruned.
[I 2021-05-07 15:32:26,952] Trial 946 pruned.
[I 2021-05-07 15:32:27,162] Trial 947 pruned.
[I 2021-05-07 15:32:28,415] Trial 948 pruned.
[I 2021-05-07 15:32:28,676] Trial 949 pruned.
[I 2021-05-07 15:32:29,423] Trial 950 pruned.
[I 2021-05-07 15:32:29,667] Trial 951 pruned.
[I 2021-05-07 15:32:30,060] Trial 952 pruned.
[I 2021-05-07 15:32:30,453] Trial 953 pruned.
[I 2021-05-07 15:32:30,630] Trial 954 pruned.
[I 2021-05-07 15:32:33,210] Trial 955 pruned.
[I 2021-05-07 15:32:33,453] Trial 956 pruned.
[I 2021-05-07 15:32:34,607] Trial 957 pruned.
[I 2021-05-07 15:32:34,737] Trial 958 pruned.
[I 2021-05-07 15:32:35,583] Trial 959 pruned.
[I 2021-05-07 15:32:35,795] Trial 960 pruned.
[I 2021-05-07 15:32:36,195] Trial 961 pruned.
[I 2021-05-07 15:32:36,586] Trial 962 pruned.
[I 2021-05-07 15:32:36,734] Trial 963 pruned.
[I 2021-05-07 15:32:36,951] Trial 964 pruned.
[I 2021-05-07 15:32:37,404] Trial 965 pruned.
[I 2021-05-07 15:32:38,164] Trial 966 pruned.
[I 2021-05-07 15:32:38,307] Trial 967 pruned.
[I 2021-05-07 15:32:40,034] Trial 968 pruned.
[I 2021-05-07 15:32:40,425] Trial 969 pruned.
[I 2021-05-07 15:32:41,172] Trial 970 pruned.
[I 2021-05-07 15:32:41,392] Trial 971 pruned.
[I 2021-05-07 15:32:41,865] Trial 972 pruned.
[I 2021-05-07 15:32:41,990] Trial 973 pruned.
[I 2021-05-07 15:32:42,774] Trial 974 pruned.
[I 2021-05-07 15:32:43,011] Trial 975 pruned.
[I 2021-05-07 15:32:43,406] Trial 976 pruned.
[I 2021-05-07 15:32:43,533] Trial 977 pruned.
[I 2021-05-07 15:32:43,928] Trial 978 pruned.
[I 2021-05-07 15:32:44,361] Trial 979 pruned.
[I 2021-05-07 15:32:44,642] Trial 980 pruned.
[I 2021-05-07 15:32:45,417] Trial 981 pruned.
[I 2021-05-07 15:32:45,539] Trial 982 pruned.
[I 2021-05-07 15:32:45,927] Trial 983 pruned.
[I 2021-05-07 15:32:46,171] Trial 984 pruned.
[I 2021-05-07 15:32:46,566] Trial 985 pruned.
[I 2021-05-07 15:32:46,690] Trial 986 pruned.
[I 2021-05-07 15:32:47,130] Trial 987 pruned.
[I 2021-05-07 15:32:47,351] Trial 988 pruned.
[I 2021-05-07 15:32:48,280] Trial 989 pruned.
[I 2021-05-07 15:32:48,715] Trial 990 pruned.
[I 2021-05-07 15:32:48,842] Trial 991 pruned.
[I 2021-05-07 15:32:49,054] Trial 992 pruned.
[I 2021-05-07 15:32:49,528] Trial 993 pruned.
[I 2021-05-07 15:32:49,933] Trial 994 pruned.
[I 2021-05-07 15:32:50,252] Trial 995 pruned.
[I 2021-05-07 15:32:50,465] Trial 996 pruned.
[I 2021-05-07 15:32:51,295] Trial 997 pruned.
[I 2021-05-07 15:32:51,700] Trial 998 pruned.
[I 2021-05-07 15:32:51,921] Trial 999 pruned.
[I 2021-05-07 15:32:52,365] Trial 1000 pruned.
[I 2021-05-07 15:32:52,492] Trial 1001 pruned.
[I 2021-05-07 15:32:53,249] Trial 1002 pruned.
[I 2021-05-07 15:32:53,646] Trial 1003 pruned.
[I 2021-05-07 15:32:54,091] Trial 1004 pruned.
[I 2021-05-07 15:32:54,217] Trial 1005 pruned.
[I 2021-05-07 15:32:54,621] Trial 1006 pruned.
[I 2021-05-07 15:32:55,010] Trial 1007 pruned.
[I 2021-05-07 15:32:55,229] Trial 1008 pruned.
[I 2021-05-07 15:32:55,748] Trial 1009 pruned.
[I 2021-05-07 15:32:55,873] Trial 1010 pruned.
[I 2021-05-07 15:32:56,277] Trial 1011 pruned.
[I 2021-05-07 15:32:56,490] Trial 1012 pruned.
[I 2021-05-07 15:32:56,925] Trial 1013 pruned.
[I 2021-05-07 15:32:57,051] Trial 1014 pruned.
[I 2021-05-07 15:32:57,493] Trial 1015 pruned.
[I 2021-05-07 15:32:57,902] Trial 1016 pruned.
[I 2021-05-07 15:32:58,291] Trial 1017 pruned.
[I 2021-05-07 15:32:59,217] Trial 1018 pruned.
[I 2021-05-07 15:32:59,359] Trial 1019 pruned.
[I 2021-05-07 15:32:59,756] Trial 1020 pruned.
[I 2021-05-07 15:33:00,163] Trial 1021 pruned.
[I 2021-05-07 15:33:00,552] Trial 1022 pruned.
[I 2021-05-07 15:33:00,694] Trial 1023 pruned.
[I 2021-05-07 15:33:00,906] Trial 1024 pruned.
[I 2021-05-07 15:33:01,687] Trial 1025 pruned.
[I 2021-05-07 15:33:02,135] Trial 1026 pruned.
[I 2021-05-07 15:33:02,373] Trial 1027 pruned.
[I 2021-05-07 15:33:03,492] Trial 1028 pruned.
[I 2021-05-07 15:33:03,642] Trial 1029 pruned.
[I 2021-05-07 15:33:04,037] Trial 1030 pruned.
[I 2021-05-07 15:33:04,447] Trial 1031 pruned.
[I 2021-05-07 15:33:04,884] Trial 1032 pruned.
[I 2021-05-07 15:33:05,007] Trial 1033 pruned.
[I 2021-05-07 15:33:05,763] Trial 1034 pruned.
[I 2021-05-07 15:33:06,543] Trial 1035 pruned.
[I 2021-05-07 15:33:06,942] Trial 1036 pruned.
[I 2021-05-07 15:33:07,797] Trial 1037 pruned.
[I 2021-05-07 15:33:07,926] Trial 1038 pruned.
[I 2021-05-07 15:33:08,895] Trial 1039 pruned.
[I 2021-05-07 15:33:09,163] Trial 1040 pruned.
[I 2021-05-07 15:33:09,614] Trial 1041 pruned.
[I 2021-05-07 15:33:09,744] Trial 1042 pruned.
[I 2021-05-07 15:33:10,542] Trial 1043 pruned.
[I 2021-05-07 15:33:10,761] Trial 1044 pruned.
[I 2021-05-07 15:33:11,244] Trial 1045 pruned.
[I 2021-05-07 15:33:11,662] Trial 1046 pruned.
[I 2021-05-07 15:33:11,789] Trial 1047 pruned.
[I 2021-05-07 15:33:12,007] Trial 1048 pruned.
[I 2021-05-07 15:33:12,451] Trial 1049 pruned.
[I 2021-05-07 15:33:13,294] Trial 1050 pruned.
[I 2021-05-07 15:33:13,433] Trial 1051 pruned.
[I 2021-05-07 15:33:13,665] Trial 1052 pruned.
[I 2021-05-07 15:33:14,417] Trial 1053 pruned.
[I 2021-05-07 15:33:14,819] Trial 1054 pruned.
[I 2021-05-07 15:33:15,037] Trial 1055 pruned.
[I 2021-05-07 15:33:25,802] Trial 1056 pruned.
[I 2021-05-07 15:33:25,934] Trial 1057 pruned.
[I 2021-05-07 15:33:26,546] Trial 1058 pruned.
[I 2021-05-07 15:33:26,835] Trial 1059 pruned.
[I 2021-05-07 15:33:27,246] Trial 1060 pruned.
[I 2021-05-07 15:33:27,375] Trial 1061 pruned.
[I 2021-05-07 15:33:27,782] Trial 1062 pruned.
[I 2021-05-07 15:33:28,236] Trial 1063 pruned.
[I 2021-05-07 15:33:28,455] Trial 1064 pruned.
[I 2021-05-07 15:33:28,862] Trial 1065 pruned.
[I 2021-05-07 15:33:29,109] Trial 1066 pruned.
[I 2021-05-07 15:33:29,606] Trial 1067 pruned.
[I 2021-05-07 15:33:29,818] Trial 1068 pruned.
[I 2021-05-07 15:33:30,231] Trial 1069 pruned.
[I 2021-05-07 15:33:30,358] Trial 1070 pruned.
[I 2021-05-07 15:33:30,795] Trial 1071 pruned.
[I 2021-05-07 15:33:31,206] Trial 1072 pruned.
[I 2021-05-07 15:33:31,634] Trial 1073 pruned.
[I 2021-05-07 15:33:32,501] Trial 1074 pruned.
[I 2021-05-07 15:33:32,635] Trial 1075 pruned.
[I 2021-05-07 15:33:32,914] Trial 1076 pruned.
[I 2021-05-07 15:33:33,686] Trial 1077 pruned.
[I 2021-05-07 15:33:34,450] Trial 1078 pruned.
[I 2021-05-07 15:33:34,584] Trial 1079 pruned.
[I 2021-05-07 15:33:34,813] Trial 1080 pruned.
[I 2021-05-07 15:33:35,604] Trial 1081 pruned.
[I 2021-05-07 15:33:36,058] Trial 1082 pruned.
[I 2021-05-07 15:33:36,316] Trial 1083 pruned.
[I 2021-05-07 15:33:36,773] Trial 1084 pruned.
[I 2021-05-07 15:33:36,915] Trial 1085 pruned.
[I 2021-05-07 15:33:37,708] Trial 1086 pruned.
[I 2021-05-07 15:33:38,027] Trial 1087 pruned.
[I 2021-05-07 15:33:38,423] Trial 1088 pruned.
[I 2021-05-07 15:33:38,563] Trial 1089 pruned.
[I 2021-05-07 15:33:39,418] Trial 1090 pruned.
[I 2021-05-07 15:33:39,812] Trial 1091 pruned.
[I 2021-05-07 15:33:40,081] Trial 1092 pruned.
[I 2021-05-07 15:33:40,841] Trial 1093 pruned.
[I 2021-05-07 15:33:40,964] Trial 1094 pruned.
[I 2021-05-07 15:33:41,397] Trial 1095 pruned.
[I 2021-05-07 15:33:41,803] Trial 1096 pruned.
[I 2021-05-07 15:33:42,558] Trial 1097 pruned.
[I 2021-05-07 15:33:42,681] Trial 1098 pruned.
[I 2021-05-07 15:33:43,107] Trial 1099 pruned.
[I 2021-05-07 15:33:43,321] Trial 1100 pruned.
[I 2021-05-07 15:33:43,709] Trial 1101 pruned.
[I 2021-05-07 15:33:44,228] Trial 1102 pruned.
[I 2021-05-07 15:33:44,360] Trial 1103 pruned.
[I 2021-05-07 15:33:44,597] Trial 1104 pruned.
[I 2021-05-07 15:33:44,990] Trial 1105 pruned.
[I 2021-05-07 15:33:45,424] Trial 1106 pruned.
[I 2021-05-07 15:33:45,549] Trial 1107 pruned.
[I 2021-05-07 15:33:45,784] Trial 1108 pruned.
[I 2021-05-07 15:33:46,271] Trial 1109 pruned.
[I 2021-05-07 15:33:46,664] Trial 1110 pruned.
[I 2021-05-07 15:33:46,886] Trial 1111 pruned.
[I 2021-05-07 15:33:47,459] Trial 1112 pruned.
[I 2021-05-07 15:33:47,602] Trial 1113 pruned.
[I 2021-05-07 15:33:48,039] Trial 1114 pruned.
[I 2021-05-07 15:33:48,484] Trial 1115 pruned.
[I 2021-05-07 15:33:48,884] Trial 1116 pruned.
[I 2021-05-07 15:33:49,008] Trial 1117 pruned.
[I 2021-05-07 15:33:49,399] Trial 1118 pruned.
[I 2021-05-07 15:33:50,551] Trial 1119 pruned.
[I 2021-05-07 15:33:50,763] Trial 1120 pruned.
[I 2021-05-07 15:33:51,156] Trial 1121 pruned.
[I 2021-05-07 15:33:51,293] Trial 1122 pruned.
[I 2021-05-07 15:33:51,709] Trial 1123 pruned.
[I 2021-05-07 15:33:51,966] Trial 1124 pruned.
[I 2021-05-07 15:33:52,409] Trial 1125 pruned.
[I 2021-05-07 15:33:52,553] Trial 1126 pruned.
[I 2021-05-07 15:33:52,925] Trial 1127 pruned.
[I 2021-05-07 15:33:53,137] Trial 1128 pruned.
[I 2021-05-07 15:33:53,535] Trial 1129 pruned.
[I 2021-05-07 15:33:54,294] Trial 1130 pruned.
[I 2021-05-07 15:33:54,418] Trial 1131 pruned.
[I 2021-05-07 15:33:54,698] Trial 1132 pruned.
[I 2021-05-07 15:33:55,106] Trial 1133 pruned.
[I 2021-05-07 15:33:55,858] Trial 1134 pruned.
[I 2021-05-07 15:33:55,987] Trial 1135 pruned.
[I 2021-05-07 15:33:56,205] Trial 1136 pruned.
[I 2021-05-07 15:33:57,411] Trial 1137 pruned.
[I 2021-05-07 15:33:58,254] Trial 1138 pruned.
[I 2021-05-07 15:33:58,468] Trial 1139 pruned.
[I 2021-05-07 15:33:58,871] Trial 1140 pruned.
[I 2021-05-07 15:33:59,032] Trial 1141 pruned.
[I 2021-05-07 15:33:59,789] Trial 1142 pruned.
[I 2021-05-07 15:34:00,385] Trial 1143 pruned.
[I 2021-05-07 15:34:00,814] Trial 1144 pruned.
[I 2021-05-07 15:34:00,938] Trial 1145 pruned.
[I 2021-05-07 15:34:01,343] Trial 1146 pruned.
[I 2021-05-07 15:34:02,095] Trial 1147 pruned.
[I 2021-05-07 15:34:02,335] Trial 1148 pruned.
[I 2021-05-07 15:34:02,734] Trial 1149 pruned.
[I 2021-05-07 15:34:02,863] Trial 1150 pruned.
[I 2021-05-07 15:34:03,337] Trial 1151 pruned.
[I 2021-05-07 15:34:03,571] Trial 1152 pruned.
[I 2021-05-07 15:34:43,809] Trial 1153 finished with value: 164.26290893554688 and parameters: {'lr': 0.002817650812349433, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 906, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:34:44,231] Trial 1154 pruned.
[I 2021-05-07 15:34:44,659] Trial 1155 pruned.
[I 2021-05-07 15:34:45,487] Trial 1156 pruned.
[I 2021-05-07 15:34:45,918] Trial 1157 pruned.
[I 2021-05-07 15:34:46,749] Trial 1158 pruned.
[I 2021-05-07 15:34:47,249] Trial 1159 pruned.
[I 2021-05-07 15:34:47,684] Trial 1160 pruned.
[I 2021-05-07 15:34:48,510] Trial 1161 pruned.
[I 2021-05-07 15:34:48,939] Trial 1162 pruned.
[I 2021-05-07 15:34:49,361] Trial 1163 pruned.
[I 2021-05-07 15:34:50,161] Trial 1164 pruned.
[I 2021-05-07 15:34:51,001] Trial 1165 pruned.
[I 2021-05-07 15:34:51,399] Trial 1166 pruned.
[I 2021-05-07 15:34:51,826] Trial 1167 pruned.
[I 2021-05-07 15:34:52,664] Trial 1168 pruned.
[I 2021-05-07 15:34:53,097] Trial 1169 pruned.
[I 2021-05-07 15:34:53,595] Trial 1170 pruned.
[I 2021-05-07 15:35:33,917] Trial 1171 finished with value: 164.93490600585938 and parameters: {'lr': 0.002512499716291469, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 914, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:35:34,347] Trial 1172 pruned.
[I 2021-05-07 15:36:15,073] Trial 1173 finished with value: 163.62062072753906 and parameters: {'lr': 0.002047493492050887, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 940, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:36:15,505] Trial 1174 pruned.
[I 2021-05-07 15:36:15,644] Trial 1175 pruned.
[I 2021-05-07 15:36:16,079] Trial 1176 pruned.
[I 2021-05-07 15:36:16,927] Trial 1177 pruned.
[I 2021-05-07 15:36:18,436] Trial 1178 pruned.
[I 2021-05-07 15:36:18,573] Trial 1179 pruned.
[I 2021-05-07 15:36:21,816] Trial 1180 pruned.
[I 2021-05-07 15:36:22,248] Trial 1181 pruned.
[I 2021-05-07 15:36:22,390] Trial 1182 pruned.
[I 2021-05-07 15:36:22,820] Trial 1183 pruned.
[I 2021-05-07 15:36:23,251] Trial 1184 pruned.
[I 2021-05-07 15:36:24,095] Trial 1185 pruned.
[I 2021-05-07 15:36:24,233] Trial 1186 pruned.
[I 2021-05-07 15:36:24,825] Trial 1187 pruned.
[I 2021-05-07 15:36:25,355] Trial 1188 pruned.
[I 2021-05-07 15:36:25,494] Trial 1189 pruned.
[I 2021-05-07 15:36:26,316] Trial 1190 pruned.
[I 2021-05-07 15:36:26,755] Trial 1191 pruned.
[I 2021-05-07 15:36:27,172] Trial 1192 pruned.
[I 2021-05-07 15:36:27,313] Trial 1193 pruned.
[I 2021-05-07 15:36:27,738] Trial 1194 pruned.
[I 2021-05-07 15:36:28,178] Trial 1195 pruned.
[I 2021-05-07 15:36:29,419] Trial 1196 pruned.
[I 2021-05-07 15:36:29,560] Trial 1197 pruned.
[I 2021-05-07 15:36:30,392] Trial 1198 pruned.
[I 2021-05-07 15:36:30,831] Trial 1199 pruned.
[I 2021-05-07 15:36:30,967] Trial 1200 pruned.
[I 2021-05-07 15:36:31,488] Trial 1201 pruned.
[I 2021-05-07 15:36:32,732] Trial 1202 pruned.
[I 2021-05-07 15:36:33,155] Trial 1203 pruned.
[I 2021-05-07 15:36:33,299] Trial 1204 pruned.
[I 2021-05-07 15:36:33,879] Trial 1205 pruned.
[I 2021-05-07 15:36:34,725] Trial 1206 pruned.
[I 2021-05-07 15:36:35,156] Trial 1207 pruned.
[I 2021-05-07 15:36:35,303] Trial 1208 pruned.
[I 2021-05-07 15:36:35,715] Trial 1209 pruned.
[I 2021-05-07 15:36:36,151] Trial 1210 pruned.
[I 2021-05-07 15:36:36,314] Trial 1211 pruned.
[I 2021-05-07 15:36:39,964] Trial 1212 pruned.
[I 2021-05-07 15:36:40,375] Trial 1213 pruned.
[I 2021-05-07 15:36:40,762] Trial 1214 pruned.
[I 2021-05-07 15:36:40,905] Trial 1215 pruned.
[I 2021-05-07 15:36:41,314] Trial 1216 pruned.
[I 2021-05-07 15:36:42,153] Trial 1217 pruned.
[I 2021-05-07 15:36:43,136] Trial 1218 pruned.
[I 2021-05-07 15:36:43,278] Trial 1219 pruned.
[I 2021-05-07 15:36:43,721] Trial 1220 pruned.
[I 2021-05-07 15:36:44,152] Trial 1221 pruned.
[I 2021-05-07 15:36:44,286] Trial 1222 pruned.
[I 2021-05-07 15:36:45,074] Trial 1223 pruned.
[I 2021-05-07 15:36:45,909] Trial 1224 pruned.
[I 2021-05-07 15:36:46,353] Trial 1225 pruned.
[I 2021-05-07 15:36:46,505] Trial 1226 pruned.
[I 2021-05-07 15:36:47,323] Trial 1227 pruned.
[I 2021-05-07 15:36:47,762] Trial 1228 pruned.
[I 2021-05-07 15:36:48,922] Trial 1229 pruned.
[I 2021-05-07 15:36:49,049] Trial 1230 pruned.
[I 2021-05-07 15:36:49,468] Trial 1231 pruned.
[I 2021-05-07 15:36:51,564] Trial 1232 pruned.
[I 2021-05-07 15:36:51,707] Trial 1233 pruned.
[I 2021-05-07 15:36:52,867] Trial 1234 pruned.
[I 2021-05-07 15:36:53,300] Trial 1235 pruned.
[I 2021-05-07 15:37:42,352] Trial 1236 finished with value: 160.9252471923828 and parameters: {'lr': 0.0028493198071382155, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 928, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:37:42,789] Trial 1237 pruned.
[I 2021-05-07 15:37:43,622] Trial 1238 pruned.
[I 2021-05-07 15:37:44,134] Trial 1239 pruned.
[I 2021-05-07 15:37:44,982] Trial 1240 pruned.
[I 2021-05-07 15:37:45,414] Trial 1241 pruned.
[I 2021-05-07 15:37:46,017] Trial 1242 pruned.
[I 2021-05-07 15:37:47,271] Trial 1243 pruned.
[I 2021-05-07 15:37:47,705] Trial 1244 pruned.
[I 2021-05-07 15:37:48,545] Trial 1245 pruned.
[I 2021-05-07 15:37:49,395] Trial 1246 pruned.
[I 2021-05-07 15:37:49,922] Trial 1247 pruned.
[I 2021-05-07 15:37:50,356] Trial 1248 pruned.
[I 2021-05-07 15:37:50,802] Trial 1249 pruned.
[I 2021-05-07 15:37:51,630] Trial 1250 pruned.
[I 2021-05-07 15:37:52,053] Trial 1251 pruned.
[I 2021-05-07 15:37:52,891] Trial 1252 pruned.
[I 2021-05-07 15:37:53,332] Trial 1253 pruned.
[I 2021-05-07 15:37:53,768] Trial 1254 pruned.
[I 2021-05-07 15:37:54,614] Trial 1255 pruned.
[I 2021-05-07 15:37:55,038] Trial 1256 pruned.
[I 2021-05-07 15:37:55,482] Trial 1257 pruned.
[I 2021-05-07 15:37:56,267] Trial 1258 pruned.
[I 2021-05-07 15:37:57,133] Trial 1259 pruned.
[I 2021-05-07 15:37:57,661] Trial 1260 pruned.
[I 2021-05-07 15:37:58,070] Trial 1261 pruned.
[I 2021-05-07 15:37:59,298] Trial 1262 pruned.
[I 2021-05-07 15:37:59,729] Trial 1263 pruned.
[I 2021-05-07 15:38:00,155] Trial 1264 pruned.
[I 2021-05-07 15:38:00,564] Trial 1265 pruned.
[I 2021-05-07 15:38:00,946] Trial 1266 pruned.
[I 2021-05-07 15:38:01,465] Trial 1267 pruned.
[I 2021-05-07 15:38:01,687] Trial 1268 pruned.
[I 2021-05-07 15:38:02,937] Trial 1269 pruned.
[I 2021-05-07 15:38:03,341] Trial 1270 pruned.
[I 2021-05-07 15:38:03,766] Trial 1271 pruned.
[I 2021-05-07 15:38:04,003] Trial 1272 pruned.
[I 2021-05-07 15:38:04,598] Trial 1273 pruned.
[I 2021-05-07 15:38:05,023] Trial 1274 pruned.
[I 2021-05-07 15:38:05,428] Trial 1275 pruned.
[I 2021-05-07 15:38:05,644] Trial 1276 pruned.
[I 2021-05-07 15:38:06,077] Trial 1277 pruned.
[I 2021-05-07 15:38:06,492] Trial 1278 pruned.
[I 2021-05-07 15:38:06,894] Trial 1279 pruned.
[I 2021-05-07 15:38:07,335] Trial 1280 pruned.
[I 2021-05-07 15:38:07,861] Trial 1281 pruned.
[I 2021-05-07 15:38:08,298] Trial 1282 pruned.
[I 2021-05-07 15:38:08,520] Trial 1283 pruned.
[I 2021-05-07 15:38:08,928] Trial 1284 pruned.
[I 2021-05-07 15:38:09,368] Trial 1285 pruned.
[I 2021-05-07 15:38:10,124] Trial 1286 pruned.
[I 2021-05-07 15:38:10,744] Trial 1287 pruned.
[I 2021-05-07 15:38:11,270] Trial 1288 pruned.
[I 2021-05-07 15:38:11,711] Trial 1289 pruned.
[I 2021-05-07 15:38:12,128] Trial 1290 pruned.
[I 2021-05-07 15:38:12,345] Trial 1291 pruned.
[I 2021-05-07 15:38:51,049] Trial 1292 finished with value: 162.38992309570312 and parameters: {'lr': 0.002085015319175176, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 882, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:38:51,466] Trial 1293 pruned.
[I 2021-05-07 15:38:52,265] Trial 1294 pruned.
[I 2021-05-07 15:38:53,075] Trial 1295 pruned.
[I 2021-05-07 15:38:53,485] Trial 1296 pruned.
[I 2021-05-07 15:38:54,310] Trial 1297 pruned.
[I 2021-05-07 15:38:54,721] Trial 1298 pruned.
[I 2021-05-07 15:38:55,159] Trial 1299 pruned.
[I 2021-05-07 15:38:55,559] Trial 1300 pruned.
[I 2021-05-07 15:38:55,991] Trial 1301 pruned.
[I 2021-05-07 15:38:56,826] Trial 1302 pruned.
[I 2021-05-07 15:38:57,230] Trial 1303 pruned.
[I 2021-05-07 15:38:57,623] Trial 1304 pruned.
[I 2021-05-07 15:38:58,020] Trial 1305 pruned.
[I 2021-05-07 15:38:59,258] Trial 1306 pruned.
[I 2021-05-07 15:39:00,107] Trial 1307 pruned.
[I 2021-05-07 15:39:00,955] Trial 1308 pruned.
[I 2021-05-07 15:39:01,397] Trial 1309 pruned.
[I 2021-05-07 15:39:37,981] Trial 1310 finished with value: 161.6288604736328 and parameters: {'lr': 0.0022995567286626106, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 726, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:39:38,388] Trial 1311 pruned.
[I 2021-05-07 15:39:38,800] Trial 1312 pruned.
[I 2021-05-07 15:39:39,199] Trial 1313 pruned.
[I 2021-05-07 15:39:39,989] Trial 1314 pruned.
[I 2021-05-07 15:39:40,751] Trial 1315 pruned.
[I 2021-05-07 15:39:41,532] Trial 1316 pruned.
[I 2021-05-07 15:39:42,300] Trial 1317 pruned.
[I 2021-05-07 15:39:42,695] Trial 1318 pruned.
[I 2021-05-07 15:39:43,106] Trial 1319 pruned.
[I 2021-05-07 15:39:43,507] Trial 1320 pruned.
[I 2021-05-07 15:39:44,324] Trial 1321 pruned.
[I 2021-05-07 15:39:45,094] Trial 1322 pruned.
[I 2021-05-07 15:39:45,484] Trial 1323 pruned.
[I 2021-05-07 15:39:46,273] Trial 1324 pruned.
[I 2021-05-07 15:39:47,035] Trial 1325 pruned.
[I 2021-05-07 15:39:47,444] Trial 1326 pruned.
[I 2021-05-07 15:39:48,035] Trial 1327 pruned.
[I 2021-05-07 15:39:48,798] Trial 1328 pruned.
[I 2021-05-07 15:39:49,212] Trial 1329 pruned.
[I 2021-05-07 15:39:49,613] Trial 1330 pruned.
[I 2021-05-07 15:39:50,414] Trial 1331 pruned.
[I 2021-05-07 15:39:50,811] Trial 1332 pruned.
[I 2021-05-07 15:39:51,224] Trial 1333 pruned.
[I 2021-05-07 15:39:51,623] Trial 1334 pruned.
[I 2021-05-07 15:39:52,382] Trial 1335 pruned.
[I 2021-05-07 15:39:52,784] Trial 1336 pruned.
[I 2021-05-07 15:39:53,184] Trial 1337 pruned.
[I 2021-05-07 15:39:53,611] Trial 1338 pruned.
[I 2021-05-07 15:39:54,006] Trial 1339 pruned.
[I 2021-05-07 15:39:54,405] Trial 1340 pruned.
[I 2021-05-07 15:39:54,815] Trial 1341 pruned.
[I 2021-05-07 15:39:55,243] Trial 1342 pruned.
[I 2021-05-07 15:39:55,645] Trial 1343 pruned.
[I 2021-05-07 15:39:56,410] Trial 1344 pruned.
[I 2021-05-07 15:39:56,833] Trial 1345 pruned.
[I 2021-05-07 15:39:57,244] Trial 1346 pruned.
[I 2021-05-07 15:39:57,637] Trial 1347 pruned.
[I 2021-05-07 15:39:58,044] Trial 1348 pruned.
[I 2021-05-07 15:39:58,466] Trial 1349 pruned.
[I 2021-05-07 15:39:58,886] Trial 1350 pruned.
[I 2021-05-07 15:39:59,295] Trial 1351 pruned.
[I 2021-05-07 15:39:59,691] Trial 1352 pruned.
[I 2021-05-07 15:40:00,130] Trial 1353 pruned.
[I 2021-05-07 15:40:00,889] Trial 1354 pruned.
[I 2021-05-07 15:40:01,706] Trial 1355 pruned.
[I 2021-05-07 15:40:02,133] Trial 1356 pruned.
[I 2021-05-07 15:40:02,891] Trial 1357 pruned.
[I 2021-05-07 15:40:03,492] Trial 1358 pruned.
[I 2021-05-07 15:40:03,887] Trial 1359 pruned.
[I 2021-05-07 15:40:04,322] Trial 1360 pruned.
[I 2021-05-07 15:40:04,748] Trial 1361 pruned.
[I 2021-05-07 15:40:05,146] Trial 1362 pruned.
[I 2021-05-07 15:40:05,963] Trial 1363 pruned.
[I 2021-05-07 15:40:06,720] Trial 1364 pruned.
[I 2021-05-07 15:40:07,502] Trial 1365 pruned.
[I 2021-05-07 15:40:07,936] Trial 1366 pruned.
[I 2021-05-07 15:40:08,695] Trial 1367 pruned.
[I 2021-05-07 15:40:09,106] Trial 1368 pruned.
[I 2021-05-07 15:40:10,329] Trial 1369 pruned.
[I 2021-05-07 15:40:11,115] Trial 1370 pruned.
[I 2021-05-07 15:40:11,513] Trial 1371 pruned.
[I 2021-05-07 15:40:12,728] Trial 1372 pruned.
[I 2021-05-07 15:40:13,136] Trial 1373 pruned.
[I 2021-05-07 15:40:13,541] Trial 1374 pruned.
[I 2021-05-07 15:40:13,935] Trial 1375 pruned.
[I 2021-05-07 15:40:14,717] Trial 1376 pruned.
[I 2021-05-07 15:40:15,142] Trial 1377 pruned.
[I 2021-05-07 15:40:15,552] Trial 1378 pruned.
[I 2021-05-07 15:40:16,330] Trial 1379 pruned.
[I 2021-05-07 15:40:17,164] Trial 1380 pruned.
[I 2021-05-07 15:40:17,570] Trial 1381 pruned.
[I 2021-05-07 15:40:18,000] Trial 1382 pruned.
[I 2021-05-07 15:40:18,411] Trial 1383 pruned.
[I 2021-05-07 15:40:18,843] Trial 1384 pruned.
[I 2021-05-07 15:40:19,252] Trial 1385 pruned.
[I 2021-05-07 15:40:19,646] Trial 1386 pruned.
[I 2021-05-07 15:40:20,079] Trial 1387 pruned.
[I 2021-05-07 15:40:20,516] Trial 1388 pruned.
[I 2021-05-07 15:40:20,932] Trial 1389 pruned.
[I 2021-05-07 15:40:22,064] Trial 1390 pruned.
[I 2021-05-07 15:40:22,503] Trial 1391 pruned.
[I 2021-05-07 15:40:23,352] Trial 1392 pruned.
[I 2021-05-07 15:40:23,752] Trial 1393 pruned.
[I 2021-05-07 15:40:24,154] Trial 1394 pruned.
[I 2021-05-07 15:40:24,592] Trial 1395 pruned.
[I 2021-05-07 15:40:25,417] Trial 1396 pruned.
[I 2021-05-07 15:40:25,827] Trial 1397 pruned.
[I 2021-05-07 15:40:26,225] Trial 1398 pruned.
[I 2021-05-07 15:40:26,981] Trial 1399 pruned.
[I 2021-05-07 15:40:27,393] Trial 1400 pruned.
[I 2021-05-07 15:40:27,827] Trial 1401 pruned.
[I 2021-05-07 15:40:28,685] Trial 1402 pruned.
[I 2021-05-07 15:40:29,095] Trial 1403 pruned.
[I 2021-05-07 15:40:29,515] Trial 1404 pruned.
[I 2021-05-07 15:40:30,299] Trial 1405 pruned.
[I 2021-05-07 15:40:30,694] Trial 1406 pruned.
[I 2021-05-07 15:40:31,126] Trial 1407 pruned.
[I 2021-05-07 15:40:31,526] Trial 1408 pruned.
[I 2021-05-07 15:40:31,927] Trial 1409 pruned.
[I 2021-05-07 15:40:32,334] Trial 1410 pruned.
[I 2021-05-07 15:40:33,094] Trial 1411 pruned.
[I 2021-05-07 15:40:33,506] Trial 1412 pruned.
[I 2021-05-07 15:40:33,916] Trial 1413 pruned.
[I 2021-05-07 15:40:34,313] Trial 1414 pruned.
[I 2021-05-07 15:40:35,065] Trial 1415 pruned.
[I 2021-05-07 15:40:35,885] Trial 1416 pruned.
[I 2021-05-07 15:40:36,132] Trial 1417 pruned.
[I 2021-05-07 15:40:36,910] Trial 1418 pruned.
[I 2021-05-07 15:40:38,049] Trial 1419 pruned.
[I 2021-05-07 15:40:38,486] Trial 1420 pruned.
[I 2021-05-07 15:40:38,895] Trial 1421 pruned.
[I 2021-05-07 15:40:39,128] Trial 1422 pruned.
[I 2021-05-07 15:40:39,558] Trial 1423 pruned.
[I 2021-05-07 15:40:40,422] Trial 1424 pruned.
[I 2021-05-07 15:40:40,872] Trial 1425 pruned.
[I 2021-05-07 15:40:41,280] Trial 1426 pruned.
[I 2021-05-07 15:40:41,509] Trial 1427 pruned.
[I 2021-05-07 15:40:41,970] Trial 1428 pruned.
[I 2021-05-07 15:40:42,777] Trial 1429 pruned.
[I 2021-05-07 15:40:43,219] Trial 1430 pruned.
[I 2021-05-07 15:40:43,644] Trial 1431 pruned.
[I 2021-05-07 15:40:43,881] Trial 1432 pruned.
[I 2021-05-07 15:40:44,723] Trial 1433 pruned.
[I 2021-05-07 15:40:45,143] Trial 1434 pruned.
[I 2021-05-07 15:40:45,605] Trial 1435 pruned.
[I 2021-05-07 15:40:45,829] Trial 1436 pruned.
[I 2021-05-07 15:40:46,256] Trial 1437 pruned.
[I 2021-05-07 15:40:46,704] Trial 1438 pruned.
[I 2021-05-07 15:40:47,586] Trial 1439 pruned.
[I 2021-05-07 15:40:47,987] Trial 1440 pruned.
[I 2021-05-07 15:40:48,216] Trial 1441 pruned.
[I 2021-05-07 15:40:49,002] Trial 1442 pruned.
[I 2021-05-07 15:40:49,437] Trial 1443 pruned.
[I 2021-05-07 15:40:49,890] Trial 1444 pruned.
[I 2021-05-07 15:40:52,623] Trial 1445 pruned.
[I 2021-05-07 15:40:52,868] Trial 1446 pruned.
[I 2021-05-07 15:40:53,447] Trial 1447 pruned.
[I 2021-05-07 15:40:54,190] Trial 1448 pruned.
[I 2021-05-07 15:40:54,616] Trial 1449 pruned.
[I 2021-05-07 15:40:55,393] Trial 1450 pruned.
[I 2021-05-07 15:40:55,629] Trial 1451 pruned.
[I 2021-05-07 15:40:56,459] Trial 1452 pruned.
[I 2021-05-07 15:40:57,258] Trial 1453 pruned.
[I 2021-05-07 15:40:57,680] Trial 1454 pruned.
[I 2021-05-07 15:40:58,528] Trial 1455 pruned.
[I 2021-05-07 15:40:58,776] Trial 1456 pruned.
[I 2021-05-07 15:40:59,218] Trial 1457 pruned.
[I 2021-05-07 15:41:00,362] Trial 1458 pruned.
[I 2021-05-07 15:41:00,759] Trial 1459 pruned.
[I 2021-05-07 15:41:00,983] Trial 1460 pruned.
[I 2021-05-07 15:41:01,807] Trial 1461 pruned.
[I 2021-05-07 15:41:03,078] Trial 1462 pruned.
[I 2021-05-07 15:41:03,479] Trial 1463 pruned.
[I 2021-05-07 15:41:04,085] Trial 1464 pruned.
[I 2021-05-07 15:41:04,331] Trial 1465 pruned.
[I 2021-05-07 15:41:04,748] Trial 1466 pruned.
[I 2021-05-07 15:41:05,519] Trial 1467 pruned.
[I 2021-05-07 15:41:06,309] Trial 1468 pruned.
[I 2021-05-07 15:41:07,126] Trial 1469 pruned.
[I 2021-05-07 15:41:07,558] Trial 1470 pruned.
[I 2021-05-07 15:41:07,965] Trial 1471 pruned.
[I 2021-05-07 15:41:08,406] Trial 1472 pruned.
[I 2021-05-07 15:41:08,820] Trial 1473 pruned.
[I 2021-05-07 15:41:09,593] Trial 1474 pruned.
[I 2021-05-07 15:41:09,831] Trial 1475 pruned.
[I 2021-05-07 15:41:10,248] Trial 1476 pruned.
[I 2021-05-07 15:41:11,096] Trial 1477 pruned.
[I 2021-05-07 15:41:11,980] Trial 1478 pruned.
[I 2021-05-07 15:41:12,373] Trial 1479 pruned.
[I 2021-05-07 15:41:12,597] Trial 1480 pruned.
[I 2021-05-07 15:41:13,004] Trial 1481 pruned.
[I 2021-05-07 15:41:29,159] Trial 1482 pruned.
[I 2021-05-07 15:41:29,931] Trial 1483 pruned.
[I 2021-05-07 15:41:30,339] Trial 1484 pruned.
[I 2021-05-07 15:41:30,430] Trial 1485 pruned.
[I 2021-05-07 15:41:30,883] Trial 1486 pruned.
[I 2021-05-07 15:41:31,320] Trial 1487 pruned.
[I 2021-05-07 15:41:32,100] Trial 1488 pruned.
[I 2021-05-07 15:41:32,927] Trial 1489 pruned.
[I 2021-05-07 15:41:33,176] Trial 1490 pruned.
[I 2021-05-07 15:41:33,964] Trial 1491 pruned.
[I 2021-05-07 15:41:34,782] Trial 1492 pruned.
[I 2021-05-07 15:41:35,212] Trial 1493 pruned.
[I 2021-05-07 15:41:35,616] Trial 1494 pruned.
[I 2021-05-07 15:41:35,847] Trial 1495 pruned.
[I 2021-05-07 15:41:36,289] Trial 1496 pruned.
[I 2021-05-07 15:41:36,721] Trial 1497 pruned.
[I 2021-05-07 15:41:37,136] Trial 1498 pruned.
[I 2021-05-07 15:41:37,721] Trial 1499 pruned.
[I 2021-05-07 15:41:37,971] Trial 1500 pruned.
[I 2021-05-07 15:41:38,368] Trial 1501 pruned.
[I 2021-05-07 15:41:38,771] Trial 1502 pruned.
[I 2021-05-07 15:41:38,920] Trial 1503 pruned.
[I 2021-05-07 15:41:39,670] Trial 1504 pruned.
[I 2021-05-07 15:41:40,101] Trial 1505 pruned.
[I 2021-05-07 15:41:40,324] Trial 1506 pruned.
[I 2021-05-07 15:41:40,776] Trial 1507 pruned.
[I 2021-05-07 15:41:41,600] Trial 1508 pruned.
[I 2021-05-07 15:41:41,858] Trial 1509 pruned.
[I 2021-05-07 15:41:42,728] Trial 1510 pruned.
[I 2021-05-07 15:41:42,823] Trial 1511 pruned.
[I 2021-05-07 15:41:43,228] Trial 1512 pruned.
[I 2021-05-07 15:41:43,479] Trial 1513 pruned.
[I 2021-05-07 15:41:43,879] Trial 1514 pruned.
[I 2021-05-07 15:41:44,311] Trial 1515 pruned.
[I 2021-05-07 15:41:44,744] Trial 1516 pruned.
[I 2021-05-07 15:41:44,880] Trial 1517 pruned.
[I 2021-05-07 15:41:45,109] Trial 1518 pruned.
[I 2021-05-07 15:41:45,532] Trial 1519 pruned.
[I 2021-05-07 15:41:45,947] Trial 1520 pruned.
[I 2021-05-07 15:41:46,354] Trial 1521 pruned.
[I 2021-05-07 15:41:46,808] Trial 1522 pruned.
[I 2021-05-07 15:41:47,055] Trial 1523 pruned.
[I 2021-05-07 15:41:47,488] Trial 1524 pruned.
[I 2021-05-07 15:41:47,629] Trial 1525 pruned.
[I 2021-05-07 15:41:48,391] Trial 1526 pruned.
[I 2021-05-07 15:41:49,653] Trial 1527 pruned.
[I 2021-05-07 15:41:50,063] Trial 1528 pruned.
[I 2021-05-07 15:41:50,291] Trial 1529 pruned.
[I 2021-05-07 15:41:50,884] Trial 1530 pruned.
[I 2021-05-07 15:41:51,292] Trial 1531 pruned.
[I 2021-05-07 15:41:51,431] Trial 1532 pruned.
[I 2021-05-07 15:41:51,884] Trial 1533 pruned.
[I 2021-05-07 15:41:52,316] Trial 1534 pruned.
[I 2021-05-07 15:41:52,557] Trial 1535 pruned.
[I 2021-05-07 15:41:52,962] Trial 1536 pruned.
[I 2021-05-07 15:41:53,843] Trial 1537 pruned.
[I 2021-05-07 15:41:53,981] Trial 1538 pruned.
[I 2021-05-07 15:41:54,429] Trial 1539 pruned.
[I 2021-05-07 15:41:54,519] Trial 1540 pruned.
[I 2021-05-07 15:41:54,921] Trial 1541 pruned.
[I 2021-05-07 15:41:55,173] Trial 1542 pruned.
[I 2021-05-07 15:41:55,618] Trial 1543 pruned.
[I 2021-05-07 15:41:56,035] Trial 1544 pruned.
[I 2021-05-07 15:41:56,483] Trial 1545 pruned.
[I 2021-05-07 15:41:56,622] Trial 1546 pruned.
[I 2021-05-07 15:41:56,884] Trial 1547 pruned.
[I 2021-05-07 15:41:57,298] Trial 1548 pruned.
[I 2021-05-07 15:41:57,720] Trial 1549 pruned.
[I 2021-05-07 15:41:58,168] Trial 1550 pruned.
[I 2021-05-07 15:41:58,581] Trial 1551 pruned.
[I 2021-05-07 15:41:58,833] Trial 1552 pruned.
[I 2021-05-07 15:41:59,609] Trial 1553 pruned.
[I 2021-05-07 15:41:59,759] Trial 1554 pruned.
[I 2021-05-07 15:42:00,618] Trial 1555 pruned.
[I 2021-05-07 15:42:01,028] Trial 1556 pruned.
[I 2021-05-07 15:42:01,463] Trial 1557 pruned.
[I 2021-05-07 15:42:01,709] Trial 1558 pruned.
[I 2021-05-07 15:42:02,885] Trial 1559 pruned.
[I 2021-05-07 15:42:03,665] Trial 1560 pruned.
[I 2021-05-07 15:42:03,864] Trial 1561 pruned.
[I 2021-05-07 15:42:04,661] Trial 1562 pruned.
[I 2021-05-07 15:42:05,421] Trial 1563 pruned.
[I 2021-05-07 15:42:05,678] Trial 1564 pruned.
[I 2021-05-07 15:42:06,115] Trial 1565 pruned.
[I 2021-05-07 15:42:06,559] Trial 1566 pruned.
[I 2021-05-07 15:42:06,699] Trial 1567 pruned.
[I 2021-05-07 15:42:43,913] Trial 1568 finished with value: 162.83457946777344 and parameters: {'lr': 0.0019307268843197587, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 674, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:42:44,336] Trial 1569 pruned.
[I 2021-05-07 15:42:44,565] Trial 1570 pruned.
[I 2021-05-07 15:42:44,658] Trial 1571 pruned.
[I 2021-05-07 15:42:45,078] Trial 1572 pruned.
[I 2021-05-07 15:42:45,512] Trial 1573 pruned.
[I 2021-05-07 15:42:45,928] Trial 1574 pruned.
[I 2021-05-07 15:42:46,068] Trial 1575 pruned.
[I 2021-05-07 15:42:46,473] Trial 1576 pruned.
[I 2021-05-07 15:42:46,702] Trial 1577 pruned.
[I 2021-05-07 15:42:47,496] Trial 1578 pruned.
[I 2021-05-07 15:42:48,872] Trial 1579 pruned.
[I 2021-05-07 15:42:49,299] Trial 1580 pruned.
[I 2021-05-07 15:42:49,540] Trial 1581 pruned.
[I 2021-05-07 15:42:49,958] Trial 1582 pruned.
[I 2021-05-07 15:42:50,107] Trial 1583 pruned.
[I 2021-05-07 15:42:50,533] Trial 1584 pruned.
[I 2021-05-07 15:42:50,943] Trial 1585 pruned.
[I 2021-05-07 15:42:51,542] Trial 1586 pruned.
[I 2021-05-07 15:42:51,770] Trial 1587 pruned.
[I 2021-05-07 15:42:52,189] Trial 1588 pruned.
[I 2021-05-07 15:42:52,603] Trial 1589 pruned.
[I 2021-05-07 15:42:52,745] Trial 1590 pruned.
[I 2021-05-07 15:42:53,532] Trial 1591 pruned.
[I 2021-05-07 15:42:53,941] Trial 1592 pruned.
[I 2021-05-07 15:42:54,171] Trial 1593 pruned.
[I 2021-05-07 15:42:54,578] Trial 1594 pruned.
[I 2021-05-07 15:42:54,977] Trial 1595 pruned.
[I 2021-05-07 15:42:55,389] Trial 1596 pruned.
[I 2021-05-07 15:42:55,525] Trial 1597 pruned.
[I 2021-05-07 15:42:56,298] Trial 1598 pruned.
[I 2021-05-07 15:42:56,393] Trial 1599 pruned.
[I 2021-05-07 15:42:56,620] Trial 1600 pruned.
[I 2021-05-07 15:42:57,399] Trial 1601 pruned.
[I 2021-05-07 15:42:58,176] Trial 1602 pruned.
[I 2021-05-07 15:42:58,582] Trial 1603 pruned.
[I 2021-05-07 15:42:58,721] Trial 1604 pruned.
[I 2021-05-07 15:42:59,127] Trial 1605 pruned.
[I 2021-05-07 15:42:59,366] Trial 1606 pruned.
[I 2021-05-07 15:42:59,776] Trial 1607 pruned.
[I 2021-05-07 15:43:00,186] Trial 1608 pruned.
[I 2021-05-07 15:43:00,598] Trial 1609 pruned.
[I 2021-05-07 15:43:00,855] Trial 1610 pruned.
[I 2021-05-07 15:43:01,633] Trial 1611 pruned.
[I 2021-05-07 15:43:01,772] Trial 1612 pruned.
[I 2021-05-07 15:43:02,221] Trial 1613 pruned.
[I 2021-05-07 15:43:02,629] Trial 1614 pruned.
[I 2021-05-07 15:43:03,032] Trial 1615 pruned.
[I 2021-05-07 15:43:03,259] Trial 1616 pruned.
[I 2021-05-07 15:43:04,027] Trial 1617 pruned.
[I 2021-05-07 15:43:04,434] Trial 1618 pruned.
[I 2021-05-07 15:43:04,569] Trial 1619 pruned.
[I 2021-05-07 15:43:04,965] Trial 1620 pruned.
[I 2021-05-07 15:43:05,374] Trial 1621 pruned.
[I 2021-05-07 15:43:05,595] Trial 1622 pruned.
[I 2021-05-07 15:43:06,446] Trial 1623 pruned.
[I 2021-05-07 15:43:07,035] Trial 1624 pruned.
[I 2021-05-07 15:43:07,446] Trial 1625 pruned.
[I 2021-05-07 15:43:07,578] Trial 1626 pruned.
[I 2021-05-07 15:43:08,342] Trial 1627 pruned.
[I 2021-05-07 15:43:08,706] Trial 1628 pruned.
[I 2021-05-07 15:43:09,101] Trial 1629 pruned.
[I 2021-05-07 15:43:45,676] Trial 1630 finished with value: 161.8177947998047 and parameters: {'lr': 0.0025859609778991115, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 762, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:43:45,774] Trial 1631 pruned.
[I 2021-05-07 15:43:46,182] Trial 1632 pruned.
[I 2021-05-07 15:43:46,599] Trial 1633 pruned.
[I 2021-05-07 15:43:46,737] Trial 1634 pruned.
[I 2021-05-07 15:43:46,968] Trial 1635 pruned.
[I 2021-05-07 15:43:47,376] Trial 1636 pruned.
[I 2021-05-07 15:43:47,784] Trial 1637 pruned.
[I 2021-05-07 15:43:48,194] Trial 1638 pruned.
[I 2021-05-07 15:43:50,770] Trial 1639 pruned.
[I 2021-05-07 15:43:50,998] Trial 1640 pruned.
[I 2021-05-07 15:43:52,135] Trial 1641 pruned.
[I 2021-05-07 15:43:52,280] Trial 1642 pruned.
[I 2021-05-07 15:43:52,686] Trial 1643 pruned.
[I 2021-05-07 15:43:53,095] Trial 1644 pruned.
[I 2021-05-07 15:43:53,503] Trial 1645 pruned.
[I 2021-05-07 15:43:53,920] Trial 1646 pruned.
[I 2021-05-07 15:43:54,698] Trial 1647 pruned.
[I 2021-05-07 15:44:32,360] Trial 1648 finished with value: 159.49960327148438 and parameters: {'lr': 0.0025705336098829, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 774, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:44:32,600] Trial 1649 pruned.
[I 2021-05-07 15:44:33,040] Trial 1650 pruned.
[I 2021-05-07 15:44:33,488] Trial 1651 pruned.
[I 2021-05-07 15:44:33,813] Trial 1652 pruned.
[I 2021-05-07 15:44:34,233] Trial 1653 pruned.
[I 2021-05-07 15:44:34,652] Trial 1654 pruned.
[I 2021-05-07 15:44:35,446] Trial 1655 pruned.
[I 2021-05-07 15:44:35,589] Trial 1656 pruned.
[I 2021-05-07 15:44:36,010] Trial 1657 pruned.
[I 2021-05-07 15:44:36,244] Trial 1658 pruned.
[I 2021-05-07 15:44:36,664] Trial 1659 pruned.
[I 2021-05-07 15:44:36,777] Trial 1660 pruned.
[I 2021-05-07 15:44:37,198] Trial 1661 pruned.
[I 2021-05-07 15:44:37,615] Trial 1662 pruned.
[I 2021-05-07 15:44:38,037] Trial 1663 pruned.
[I 2021-05-07 15:44:38,182] Trial 1664 pruned.
[I 2021-05-07 15:44:38,416] Trial 1665 pruned.
[I 2021-05-07 15:44:38,835] Trial 1666 pruned.
[I 2021-05-07 15:44:39,254] Trial 1667 pruned.
[I 2021-05-07 15:44:41,167] Trial 1668 pruned.
[I 2021-05-07 15:44:41,583] Trial 1669 pruned.
[I 2021-05-07 15:44:41,817] Trial 1670 pruned.
[I 2021-05-07 15:44:42,238] Trial 1671 pruned.
[I 2021-05-07 15:44:42,381] Trial 1672 pruned.
[I 2021-05-07 15:44:43,178] Trial 1673 pruned.
[I 2021-05-07 15:44:43,778] Trial 1674 pruned.
[I 2021-05-07 15:44:44,572] Trial 1675 pruned.
[I 2021-05-07 15:44:44,806] Trial 1676 pruned.
[I 2021-05-07 15:44:45,210] Trial 1677 pruned.
[I 2021-05-07 15:44:45,378] Trial 1678 pruned.
[I 2021-05-07 15:44:45,843] Trial 1679 pruned.
[I 2021-05-07 15:44:46,659] Trial 1680 pruned.
[I 2021-05-07 15:44:47,125] Trial 1681 pruned.
[I 2021-05-07 15:44:47,549] Trial 1682 pruned.
[I 2021-05-07 15:44:47,961] Trial 1683 pruned.
[I 2021-05-07 15:44:48,421] Trial 1684 pruned.
[I 2021-05-07 15:44:48,563] Trial 1685 pruned.
[I 2021-05-07 15:44:48,982] Trial 1686 pruned.
[I 2021-05-07 15:44:49,442] Trial 1687 pruned.
[I 2021-05-07 15:44:50,239] Trial 1688 pruned.
[I 2021-05-07 15:44:50,347] Trial 1689 pruned.
[I 2021-05-07 15:44:50,763] Trial 1690 pruned.
[I 2021-05-07 15:44:51,190] Trial 1691 pruned.
[I 2021-05-07 15:44:51,653] Trial 1692 pruned.
[I 2021-05-07 15:44:51,807] Trial 1693 pruned.
[I 2021-05-07 15:44:52,066] Trial 1694 pruned.
[I 2021-05-07 15:44:52,859] Trial 1695 pruned.
[I 2021-05-07 15:44:53,294] Trial 1696 pruned.
[I 2021-05-07 15:44:54,182] Trial 1697 pruned.
[I 2021-05-07 15:45:32,189] Trial 1698 finished with value: 163.23114013671875 and parameters: {'lr': 0.006026845117474345, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 808, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:45:32,428] Trial 1699 pruned.
[I 2021-05-07 15:45:32,853] Trial 1700 pruned.
[I 2021-05-07 15:45:32,996] Trial 1701 pruned.
[I 2021-05-07 15:45:33,791] Trial 1702 pruned.
[I 2021-05-07 15:45:34,596] Trial 1703 pruned.
[I 2021-05-07 15:45:35,015] Trial 1704 pruned.
[I 2021-05-07 15:45:35,439] Trial 1705 pruned.
[I 2021-05-07 15:45:35,882] Trial 1706 pruned.
[I 2021-05-07 15:45:36,310] Trial 1707 pruned.
[I 2021-05-07 15:45:36,453] Trial 1708 pruned.
[I 2021-05-07 15:45:36,873] Trial 1709 pruned.
[I 2021-05-07 15:45:37,304] Trial 1710 pruned.
[I 2021-05-07 15:45:37,540] Trial 1711 pruned.
[I 2021-05-07 15:45:37,964] Trial 1712 pruned.
[I 2021-05-07 15:45:38,758] Trial 1713 pruned.
[I 2021-05-07 15:45:39,181] Trial 1714 pruned.
[I 2021-05-07 15:45:39,327] Trial 1715 pruned.
[I 2021-05-07 15:45:40,137] Trial 1716 pruned.
[I 2021-05-07 15:45:40,386] Trial 1717 pruned.
[I 2021-05-07 15:45:40,483] Trial 1718 pruned.
[I 2021-05-07 15:45:40,918] Trial 1719 pruned.
[I 2021-05-07 15:45:41,339] Trial 1720 pruned.
[I 2021-05-07 15:45:41,770] Trial 1721 pruned.
[I 2021-05-07 15:45:41,915] Trial 1722 pruned.
[I 2021-05-07 15:45:42,344] Trial 1723 pruned.
[I 2021-05-07 15:45:42,795] Trial 1724 pruned.
[I 2021-05-07 15:45:43,035] Trial 1725 pruned.
[I 2021-05-07 15:45:43,473] Trial 1726 pruned.
[I 2021-05-07 15:45:43,913] Trial 1727 pruned.
[I 2021-05-07 15:45:44,568] Trial 1728 pruned.
[I 2021-05-07 15:45:44,994] Trial 1729 pruned.
[I 2021-05-07 15:45:45,141] Trial 1730 pruned.
[I 2021-05-07 15:45:45,578] Trial 1731 pruned.
[I 2021-05-07 15:45:46,414] Trial 1732 pruned.
[I 2021-05-07 15:45:47,218] Trial 1733 pruned.
[I 2021-05-07 15:45:47,448] Trial 1734 pruned.
[I 2021-05-07 15:45:47,919] Trial 1735 pruned.
[I 2021-05-07 15:45:48,752] Trial 1736 pruned.
[I 2021-05-07 15:45:48,902] Trial 1737 pruned.
[I 2021-05-07 15:45:49,335] Trial 1738 pruned.
[I 2021-05-07 15:45:49,774] Trial 1739 pruned.
[I 2021-05-07 15:45:50,028] Trial 1740 pruned.
[I 2021-05-07 15:45:50,495] Trial 1741 pruned.
[I 2021-05-07 15:45:50,971] Trial 1742 pruned.
[I 2021-05-07 15:45:51,405] Trial 1743 pruned.
[I 2021-05-07 15:45:51,569] Trial 1744 pruned.
[I 2021-05-07 15:45:52,046] Trial 1745 pruned.
[I 2021-05-07 15:45:52,323] Trial 1746 pruned.
[I 2021-05-07 15:45:52,418] Trial 1747 pruned.
[I 2021-05-07 15:45:52,892] Trial 1748 pruned.
[I 2021-05-07 15:45:53,364] Trial 1749 pruned.
[I 2021-05-07 15:45:54,222] Trial 1750 pruned.
[I 2021-05-07 15:45:54,388] Trial 1751 pruned.
[I 2021-05-07 15:45:55,277] Trial 1752 pruned.
[I 2021-05-07 15:45:55,536] Trial 1753 pruned.
[I 2021-05-07 15:45:56,020] Trial 1754 pruned.
[I 2021-05-07 15:45:56,529] Trial 1755 pruned.
[I 2021-05-07 15:45:56,997] Trial 1756 pruned.
[I 2021-05-07 15:45:57,511] Trial 1757 pruned.
[I 2021-05-07 15:45:58,463] Trial 1758 pruned.
[I 2021-05-07 15:45:58,613] Trial 1759 pruned.
[I 2021-05-07 15:45:59,023] Trial 1760 pruned.
[I 2021-05-07 15:45:59,462] Trial 1761 pruned.
[I 2021-05-07 15:45:59,892] Trial 1762 pruned.
[I 2021-05-07 15:46:00,331] Trial 1763 pruned.
[I 2021-05-07 15:46:00,796] Trial 1764 pruned.
[I 2021-05-07 15:46:01,743] Trial 1765 pruned.
[I 2021-05-07 15:46:01,896] Trial 1766 pruned.
[I 2021-05-07 15:46:02,370] Trial 1767 pruned.
[I 2021-05-07 15:46:02,822] Trial 1768 pruned.
[I 2021-05-07 15:46:03,054] Trial 1769 pruned.
[I 2021-05-07 15:46:03,745] Trial 1770 pruned.
[I 2021-05-07 15:46:04,160] Trial 1771 pruned.
[I 2021-05-07 15:46:04,601] Trial 1772 pruned.
[I 2021-05-07 15:46:04,744] Trial 1773 pruned.
[I 2021-05-07 15:46:05,173] Trial 1774 pruned.
[I 2021-05-07 15:46:05,430] Trial 1775 pruned.
[I 2021-05-07 15:46:05,884] Trial 1776 pruned.
[I 2021-05-07 15:46:05,981] Trial 1777 pruned.
[I 2021-05-07 15:46:06,863] Trial 1778 pruned.
[I 2021-05-07 15:46:07,330] Trial 1779 pruned.
[I 2021-05-07 15:46:07,483] Trial 1780 pruned.
[I 2021-05-07 15:46:07,909] Trial 1781 pruned.
[I 2021-05-07 15:46:08,160] Trial 1782 pruned.
[I 2021-05-07 15:46:08,592] Trial 1783 pruned.
[I 2021-05-07 15:46:09,008] Trial 1784 pruned.
[I 2021-05-07 15:46:09,487] Trial 1785 pruned.
[I 2021-05-07 15:46:09,740] Trial 1786 pruned.
[I 2021-05-07 15:46:10,163] Trial 1787 pruned.
[I 2021-05-07 15:46:10,321] Trial 1788 pruned.
[I 2021-05-07 15:46:10,738] Trial 1789 pruned.
[I 2021-05-07 15:46:11,180] Trial 1790 pruned.
[I 2021-05-07 15:46:11,646] Trial 1791 pruned.
[I 2021-05-07 15:46:11,884] Trial 1792 pruned.
[I 2021-05-07 15:46:12,713] Trial 1793 pruned.
[I 2021-05-07 15:46:13,522] Trial 1794 pruned.
[I 2021-05-07 15:46:13,680] Trial 1795 pruned.
[I 2021-05-07 15:46:14,099] Trial 1796 pruned.
[I 2021-05-07 15:46:14,972] Trial 1797 pruned.
[I 2021-05-07 15:46:15,207] Trial 1798 pruned.
[I 2021-05-07 15:46:15,671] Trial 1799 pruned.
[I 2021-05-07 15:46:16,080] Trial 1800 pruned.
[I 2021-05-07 15:46:16,980] Trial 1801 pruned.
[I 2021-05-07 15:46:17,134] Trial 1802 pruned.
[I 2021-05-07 15:46:17,577] Trial 1803 pruned.
[I 2021-05-07 15:46:17,812] Trial 1804 pruned.
[I 2021-05-07 15:46:18,276] Trial 1805 pruned.
[I 2021-05-07 15:46:18,371] Trial 1806 pruned.
[I 2021-05-07 15:46:18,837] Trial 1807 pruned.
[I 2021-05-07 15:46:19,304] Trial 1808 pruned.
[I 2021-05-07 15:46:19,731] Trial 1809 pruned.
[I 2021-05-07 15:46:19,879] Trial 1810 pruned.
[I 2021-05-07 15:46:20,123] Trial 1811 pruned.
[I 2021-05-07 15:46:20,939] Trial 1812 pruned.
[I 2021-05-07 15:46:21,362] Trial 1813 pruned.
[I 2021-05-07 15:46:22,221] Trial 1814 pruned.
[I 2021-05-07 15:46:22,459] Trial 1815 pruned.
[I 2021-05-07 15:46:22,918] Trial 1816 pruned.
[I 2021-05-07 15:46:23,081] Trial 1817 pruned.
[I 2021-05-07 15:46:23,518] Trial 1818 pruned.
[I 2021-05-07 15:46:23,989] Trial 1819 pruned.
[I 2021-05-07 15:46:24,409] Trial 1820 pruned.
[I 2021-05-07 15:46:24,651] Trial 1821 pruned.
[I 2021-05-07 15:46:25,112] Trial 1822 pruned.
[I 2021-05-07 15:46:25,529] Trial 1823 pruned.
[I 2021-05-07 15:46:25,681] Trial 1824 pruned.
[I 2021-05-07 15:46:26,465] Trial 1825 pruned.
[I 2021-05-07 15:46:27,671] Trial 1826 pruned.
[I 2021-05-07 15:46:27,922] Trial 1827 pruned.
[I 2021-05-07 15:46:28,394] Trial 1828 pruned.
[I 2021-05-07 15:46:28,862] Trial 1829 pruned.
[I 2021-05-07 15:46:29,560] Trial 1830 pruned.
[I 2021-05-07 15:46:29,704] Trial 1831 pruned.
[I 2021-05-07 15:46:30,128] Trial 1832 pruned.
[I 2021-05-07 15:46:30,364] Trial 1833 pruned.
[I 2021-05-07 15:46:30,784] Trial 1834 pruned.
[I 2021-05-07 15:46:31,216] Trial 1835 pruned.
[I 2021-05-07 15:46:31,324] Trial 1836 pruned.
[I 2021-05-07 15:46:31,754] Trial 1837 pruned.
[I 2021-05-07 15:46:32,170] Trial 1838 pruned.
[I 2021-05-07 15:46:32,328] Trial 1839 pruned.
[I 2021-05-07 15:46:32,589] Trial 1840 pruned.
[I 2021-05-07 15:46:33,012] Trial 1841 pruned.
[I 2021-05-07 15:46:33,467] Trial 1842 pruned.
[I 2021-05-07 15:46:33,906] Trial 1843 pruned.
[I 2021-05-07 15:46:34,793] Trial 1844 pruned.
[I 2021-05-07 15:46:35,263] Trial 1845 pruned.
[I 2021-05-07 15:46:35,412] Trial 1846 pruned.
[I 2021-05-07 15:46:35,837] Trial 1847 pruned.
[I 2021-05-07 15:46:36,267] Trial 1848 pruned.
[I 2021-05-07 15:46:36,725] Trial 1849 pruned.
[I 2021-05-07 15:46:36,973] Trial 1850 pruned.
[I 2021-05-07 15:46:37,805] Trial 1851 pruned.
[I 2021-05-07 15:46:38,257] Trial 1852 pruned.
[I 2021-05-07 15:46:38,405] Trial 1853 pruned.
[I 2021-05-07 15:46:38,827] Trial 1854 pruned.
[I 2021-05-07 15:46:39,295] Trial 1855 pruned.
[I 2021-05-07 15:46:39,560] Trial 1856 pruned.
[I 2021-05-07 15:46:40,453] Trial 1857 pruned.
[I 2021-05-07 15:46:40,890] Trial 1858 pruned.
[I 2021-05-07 15:46:41,360] Trial 1859 pruned.
[I 2021-05-07 15:46:41,506] Trial 1860 pruned.
[I 2021-05-07 15:46:41,921] Trial 1861 pruned.
[I 2021-05-07 15:46:42,161] Trial 1862 pruned.
[I 2021-05-07 15:46:42,842] Trial 1863 pruned.
[I 2021-05-07 15:46:43,277] Trial 1864 pruned.
[I 2021-05-07 15:46:43,727] Trial 1865 pruned.
[I 2021-05-07 15:46:43,823] Trial 1866 pruned.
[I 2021-05-07 15:46:44,276] Trial 1867 pruned.
[I 2021-05-07 15:46:44,422] Trial 1868 pruned.
[I 2021-05-07 15:46:44,669] Trial 1869 pruned.
[I 2021-05-07 15:46:45,083] Trial 1870 pruned.
[I 2021-05-07 15:46:45,557] Trial 1871 pruned.
[I 2021-05-07 15:46:46,010] Trial 1872 pruned.
[I 2021-05-07 15:46:46,431] Trial 1873 pruned.
[I 2021-05-07 15:46:46,693] Trial 1874 pruned.
[I 2021-05-07 15:46:46,834] Trial 1875 pruned.
[I 2021-05-07 15:46:47,277] Trial 1876 pruned.
[I 2021-05-07 15:46:47,699] Trial 1877 pruned.
[I 2021-05-07 15:46:48,128] Trial 1878 pruned.
[I 2021-05-07 15:46:48,384] Trial 1879 pruned.
[I 2021-05-07 15:46:48,854] Trial 1880 pruned.
[I 2021-05-07 15:46:49,275] Trial 1881 pruned.
[I 2021-05-07 15:46:49,417] Trial 1882 pruned.
[I 2021-05-07 15:46:49,840] Trial 1883 pruned.
[I 2021-05-07 15:46:50,274] Trial 1884 pruned.
[I 2021-05-07 15:46:50,531] Trial 1885 pruned.
[I 2021-05-07 15:46:50,961] Trial 1886 pruned.
[I 2021-05-07 15:46:51,417] Trial 1887 pruned.
[I 2021-05-07 15:46:51,870] Trial 1888 pruned.
[I 2021-05-07 15:46:52,034] Trial 1889 pruned.
[I 2021-05-07 15:46:52,454] Trial 1890 pruned.
[I 2021-05-07 15:46:52,722] Trial 1891 pruned.
[I 2021-05-07 15:46:53,143] Trial 1892 pruned.
[I 2021-05-07 15:46:53,594] Trial 1893 pruned.
[I 2021-05-07 15:46:54,397] Trial 1894 pruned.
[I 2021-05-07 15:46:54,504] Trial 1895 pruned.
[I 2021-05-07 15:46:54,922] Trial 1896 pruned.
[I 2021-05-07 15:46:55,063] Trial 1897 pruned.
[I 2021-05-07 15:46:55,321] Trial 1898 pruned.
[I 2021-05-07 15:46:56,119] Trial 1899 pruned.
[I 2021-05-07 15:46:56,943] Trial 1900 pruned.
[I 2021-05-07 15:46:57,407] Trial 1901 pruned.
[I 2021-05-07 15:46:58,229] Trial 1902 pruned.
[I 2021-05-07 15:46:58,461] Trial 1903 pruned.
[I 2021-05-07 15:46:58,617] Trial 1904 pruned.
[I 2021-05-07 15:46:59,062] Trial 1905 pruned.
[I 2021-05-07 15:46:59,482] Trial 1906 pruned.
[I 2021-05-07 15:47:00,385] Trial 1907 pruned.
[I 2021-05-07 15:47:00,622] Trial 1908 pruned.
[I 2021-05-07 15:47:01,513] Trial 1909 pruned.
[I 2021-05-07 15:47:02,313] Trial 1910 pruned.
[I 2021-05-07 15:47:02,463] Trial 1911 pruned.
[I 2021-05-07 15:47:02,917] Trial 1912 pruned.
[I 2021-05-07 15:47:03,340] Trial 1913 pruned.
[I 2021-05-07 15:47:03,601] Trial 1914 pruned.
[I 2021-05-07 15:47:04,025] Trial 1915 pruned.
[I 2021-05-07 15:47:04,478] Trial 1916 pruned.
[I 2021-05-07 15:47:04,894] Trial 1917 pruned.
[I 2021-05-07 15:47:05,038] Trial 1918 pruned.
[I 2021-05-07 15:47:05,464] Trial 1919 pruned.
[I 2021-05-07 15:47:05,725] Trial 1920 pruned.
[I 2021-05-07 15:47:06,201] Trial 1921 pruned.
[I 2021-05-07 15:47:06,302] Trial 1922 pruned.
[I 2021-05-07 15:47:06,764] Trial 1923 pruned.
[I 2021-05-07 15:47:07,187] Trial 1924 pruned.
[I 2021-05-07 15:47:08,084] Trial 1925 pruned.
[I 2021-05-07 15:47:08,221] Trial 1926 pruned.
[I 2021-05-07 15:47:08,463] Trial 1927 pruned.
[I 2021-05-07 15:47:09,651] Trial 1928 pruned.
[I 2021-05-07 15:47:10,119] Trial 1929 pruned.
[I 2021-05-07 15:47:10,543] Trial 1930 pruned.
[I 2021-05-07 15:47:11,346] Trial 1931 pruned.
[I 2021-05-07 15:47:11,598] Trial 1932 pruned.
[I 2021-05-07 15:47:11,797] Trial 1933 pruned.
[I 2021-05-07 15:47:12,273] Trial 1934 pruned.
[I 2021-05-07 15:47:13,439] Trial 1935 pruned.
[I 2021-05-07 15:47:16,830] Trial 1936 pruned.
[I 2021-05-07 15:47:17,092] Trial 1937 pruned.
[I 2021-05-07 15:47:17,555] Trial 1938 pruned.
[I 2021-05-07 15:47:20,077] Trial 1939 pruned.
[I 2021-05-07 15:47:20,226] Trial 1940 pruned.
[I 2021-05-07 15:47:20,656] Trial 1941 pruned.
[I 2021-05-07 15:47:21,352] Trial 1942 pruned.
[I 2021-05-07 15:47:21,595] Trial 1943 pruned.
[I 2021-05-07 15:47:22,065] Trial 1944 pruned.
[I 2021-05-07 15:47:22,548] Trial 1945 pruned.
[I 2021-05-07 15:47:22,972] Trial 1946 pruned.
[I 2021-05-07 15:47:23,115] Trial 1947 pruned.
[I 2021-05-07 15:47:24,283] Trial 1948 pruned.
[I 2021-05-07 15:47:24,520] Trial 1949 pruned.
[I 2021-05-07 15:47:24,942] Trial 1950 pruned.
[I 2021-05-07 15:47:27,265] Trial 1951 pruned.
[I 2021-05-07 15:47:27,364] Trial 1952 pruned.
[I 2021-05-07 15:47:27,819] Trial 1953 pruned.
[I 2021-05-07 15:47:28,660] Trial 1954 pruned.
[I 2021-05-07 15:47:28,819] Trial 1955 pruned.
[I 2021-05-07 15:47:29,075] Trial 1956 pruned.
[I 2021-05-07 15:47:29,488] Trial 1957 pruned.
[I 2021-05-07 15:47:29,904] Trial 1958 pruned.
[I 2021-05-07 15:47:30,337] Trial 1959 pruned.
[I 2021-05-07 15:47:30,793] Trial 1960 pruned.
[I 2021-05-07 15:47:31,038] Trial 1961 pruned.
[I 2021-05-07 15:47:31,186] Trial 1962 pruned.
[I 2021-05-07 15:47:31,618] Trial 1963 pruned.
[I 2021-05-07 15:47:32,078] Trial 1964 pruned.
[I 2021-05-07 15:47:33,280] Trial 1965 pruned.
[I 2021-05-07 15:47:33,613] Trial 1966 pruned.
[I 2021-05-07 15:47:34,082] Trial 1967 pruned.
[I 2021-05-07 15:47:34,555] Trial 1968 pruned.
[I 2021-05-07 15:47:34,712] Trial 1969 pruned.
[I 2021-05-07 15:47:35,403] Trial 1970 pruned.
[I 2021-05-07 15:47:36,294] Trial 1971 pruned.
[I 2021-05-07 15:47:36,535] Trial 1972 pruned.
[I 2021-05-07 15:47:36,955] Trial 1973 pruned.
[I 2021-05-07 15:47:37,385] Trial 1974 pruned.
[I 2021-05-07 15:47:38,180] Trial 1975 pruned.
[I 2021-05-07 15:47:38,326] Trial 1976 pruned.
[I 2021-05-07 15:47:38,761] Trial 1977 pruned.
[I 2021-05-07 15:47:38,989] Trial 1978 pruned.
[I 2021-05-07 15:47:39,912] Trial 1979 pruned.
[I 2021-05-07 15:47:40,336] Trial 1980 pruned.
[I 2021-05-07 15:47:40,441] Trial 1981 pruned.
[I 2021-05-07 15:47:40,899] Trial 1982 pruned.
[I 2021-05-07 15:47:41,374] Trial 1983 pruned.
[I 2021-05-07 15:47:41,532] Trial 1984 pruned.
[I 2021-05-07 15:47:41,771] Trial 1985 pruned.
[I 2021-05-07 15:47:42,674] Trial 1986 pruned.
[I 2021-05-07 15:47:43,520] Trial 1987 pruned.
[I 2021-05-07 15:47:43,956] Trial 1988 pruned.
[I 2021-05-07 15:47:44,381] Trial 1989 pruned.
[I 2021-05-07 15:47:44,626] Trial 1990 pruned.
[I 2021-05-07 15:47:44,773] Trial 1991 pruned.
[I 2021-05-07 15:47:45,204] Trial 1992 pruned.
[I 2021-05-07 15:47:45,632] Trial 1993 pruned.
[I 2021-05-07 15:47:46,801] Trial 1994 pruned.
[I 2021-05-07 15:47:47,062] Trial 1995 pruned.
[I 2021-05-07 15:47:47,513] Trial 1996 pruned.
[I 2021-05-07 15:47:47,942] Trial 1997 pruned.
[I 2021-05-07 15:47:48,087] Trial 1998 pruned.
[I 2021-05-07 15:47:48,977] Trial 1999 pruned.
[I 2021-05-07 15:47:49,776] Trial 2000 pruned.
[I 2021-05-07 15:47:50,220] Trial 2001 pruned.
[I 2021-05-07 15:47:51,405] Trial 2002 pruned.
[I 2021-05-07 15:47:51,876] Trial 2003 pruned.
[I 2021-05-07 15:47:52,343] Trial 2004 pruned.
[I 2021-05-07 15:47:52,497] Trial 2005 pruned.
[I 2021-05-07 15:47:52,933] Trial 2006 pruned.
[I 2021-05-07 15:47:53,164] Trial 2007 pruned.
[I 2021-05-07 15:47:53,640] Trial 2008 pruned.
[I 2021-05-07 15:47:54,068] Trial 2009 pruned.
[I 2021-05-07 15:47:54,171] Trial 2010 pruned.
[I 2021-05-07 15:47:54,591] Trial 2011 pruned.
[I 2021-05-07 15:47:55,019] Trial 2012 pruned.
[I 2021-05-07 15:47:55,184] Trial 2013 pruned.
[I 2021-05-07 15:47:55,525] Trial 2014 pruned.
[I 2021-05-07 15:47:55,961] Trial 2015 pruned.
[I 2021-05-07 15:47:56,822] Trial 2016 pruned.
[I 2021-05-07 15:47:57,259] Trial 2017 pruned.
[I 2021-05-07 15:47:57,684] Trial 2018 pruned.
[I 2021-05-07 15:47:57,947] Trial 2019 pruned.
[I 2021-05-07 15:47:58,095] Trial 2020 pruned.
[I 2021-05-07 15:47:58,950] Trial 2021 pruned.
[I 2021-05-07 15:47:59,772] Trial 2022 pruned.
[I 2021-05-07 15:48:00,241] Trial 2023 pruned.
[I 2021-05-07 15:48:00,484] Trial 2024 pruned.
[I 2021-05-07 15:48:00,944] Trial 2025 pruned.
[I 2021-05-07 15:48:01,366] Trial 2026 pruned.
[I 2021-05-07 15:48:01,520] Trial 2027 pruned.
[I 2021-05-07 15:48:02,439] Trial 2028 pruned.
[I 2021-05-07 15:48:02,862] Trial 2029 pruned.
[I 2021-05-07 15:48:03,103] Trial 2030 pruned.
[I 2021-05-07 15:48:03,538] Trial 2031 pruned.
[I 2021-05-07 15:48:04,004] Trial 2032 pruned.
[I 2021-05-07 15:48:04,440] Trial 2033 pruned.
[I 2021-05-07 15:48:04,602] Trial 2034 pruned.
[I 2021-05-07 15:48:05,036] Trial 2035 pruned.
[I 2021-05-07 15:48:05,292] Trial 2036 pruned.
[I 2021-05-07 15:48:05,730] Trial 2037 pruned.
[I 2021-05-07 15:48:06,185] Trial 2038 pruned.
[I 2021-05-07 15:48:06,293] Trial 2039 pruned.
[I 2021-05-07 15:48:06,726] Trial 2040 pruned.
[I 2021-05-07 15:48:07,185] Trial 2041 pruned.
[I 2021-05-07 15:48:07,353] Trial 2042 pruned.
[I 2021-05-07 15:48:07,594] Trial 2043 pruned.
[I 2021-05-07 15:48:08,024] Trial 2044 pruned.
[I 2021-05-07 15:48:08,446] Trial 2045 pruned.
[I 2021-05-07 15:48:08,883] Trial 2046 pruned.
[I 2021-05-07 15:48:09,505] Trial 2047 pruned.
[I 2021-05-07 15:48:09,926] Trial 2048 pruned.
[I 2021-05-07 15:48:10,078] Trial 2049 pruned.
[I 2021-05-07 15:48:10,530] Trial 2050 pruned.
[I 2021-05-07 15:48:10,966] Trial 2051 pruned.
[I 2021-05-07 15:48:11,422] Trial 2052 pruned.
[I 2021-05-07 15:48:11,660] Trial 2053 pruned.
[I 2021-05-07 15:48:12,344] Trial 2054 pruned.
[I 2021-05-07 15:48:13,260] Trial 2055 pruned.
[I 2021-05-07 15:48:13,405] Trial 2056 pruned.
[I 2021-05-07 15:48:13,827] Trial 2057 pruned.
[I 2021-05-07 15:48:14,252] Trial 2058 pruned.
[I 2021-05-07 15:48:14,501] Trial 2059 pruned.
[I 2021-05-07 15:48:14,938] Trial 2060 pruned.
[I 2021-05-07 15:48:15,361] Trial 2061 pruned.
[I 2021-05-07 15:48:15,795] Trial 2062 pruned.
[I 2021-05-07 15:48:15,944] Trial 2063 pruned.
[I 2021-05-07 15:48:16,388] Trial 2064 pruned.
[I 2021-05-07 15:48:16,647] Trial 2065 pruned.
[I 2021-05-07 15:48:17,443] Trial 2066 pruned.
[I 2021-05-07 15:48:17,883] Trial 2067 pruned.
[I 2021-05-07 15:48:18,348] Trial 2068 pruned.
[I 2021-05-07 15:48:18,450] Trial 2069 pruned.
[I 2021-05-07 15:48:19,337] Trial 2070 pruned.
[I 2021-05-07 15:48:19,499] Trial 2071 pruned.
[I 2021-05-07 15:48:19,750] Trial 2072 pruned.
[I 2021-05-07 15:48:20,197] Trial 2073 pruned.
[I 2021-05-07 15:48:20,624] Trial 2074 pruned.
[I 2021-05-07 15:48:21,056] Trial 2075 pruned.
[I 2021-05-07 15:48:22,411] Trial 2076 pruned.
[I 2021-05-07 15:48:22,654] Trial 2077 pruned.
[I 2021-05-07 15:48:23,096] Trial 2078 pruned.
[I 2021-05-07 15:48:23,260] Trial 2079 pruned.
[I 2021-05-07 15:48:23,697] Trial 2080 pruned.
[I 2021-05-07 15:48:24,122] Trial 2081 pruned.
[I 2021-05-07 15:48:24,395] Trial 2082 pruned.
[I 2021-05-07 15:48:24,835] Trial 2083 pruned.
[I 2021-05-07 15:48:25,264] Trial 2084 pruned.
[I 2021-05-07 15:48:25,415] Trial 2085 pruned.
[I 2021-05-07 15:48:25,855] Trial 2086 pruned.
[I 2021-05-07 15:48:26,483] Trial 2087 pruned.
[I 2021-05-07 15:48:26,721] Trial 2088 pruned.
[I 2021-05-07 15:48:27,136] Trial 2089 pruned.
[I 2021-05-07 15:48:28,022] Trial 2090 pruned.
[I 2021-05-07 15:48:28,727] Trial 2091 pruned.
[I 2021-05-07 15:48:28,879] Trial 2092 pruned.
[I 2021-05-07 15:48:29,308] Trial 2093 pruned.
[I 2021-05-07 15:48:29,553] Trial 2094 pruned.
[I 2021-05-07 15:48:30,013] Trial 2095 pruned.
[I 2021-05-07 15:48:30,470] Trial 2096 pruned.
[I 2021-05-07 15:48:30,575] Trial 2097 pruned.
[I 2021-05-07 15:48:31,047] Trial 2098 pruned.
[I 2021-05-07 15:48:33,331] Trial 2099 pruned.
[I 2021-05-07 15:48:33,501] Trial 2100 pruned.
[I 2021-05-07 15:48:33,755] Trial 2101 pruned.
[I 2021-05-07 15:48:34,565] Trial 2102 pruned.
[I 2021-05-07 15:48:35,007] Trial 2103 pruned.
[I 2021-05-07 15:48:35,434] Trial 2104 pruned.
[I 2021-05-07 15:48:36,768] Trial 2105 pruned.
[I 2021-05-07 15:48:37,011] Trial 2106 pruned.
[I 2021-05-07 15:48:37,835] Trial 2107 pruned.
[I 2021-05-07 15:48:37,988] Trial 2108 pruned.
[I 2021-05-07 15:48:38,882] Trial 2109 pruned.
[I 2021-05-07 15:48:39,313] Trial 2110 pruned.
[I 2021-05-07 15:48:39,777] Trial 2111 pruned.
[I 2021-05-07 15:48:40,022] Trial 2112 pruned.
[I 2021-05-07 15:48:40,473] Trial 2113 pruned.
[I 2021-05-07 15:48:40,643] Trial 2114 pruned.
[I 2021-05-07 15:48:41,072] Trial 2115 pruned.
[I 2021-05-07 15:48:41,518] Trial 2116 pruned.
[I 2021-05-07 15:48:41,751] Trial 2117 pruned.
[I 2021-05-07 15:48:42,216] Trial 2118 pruned.
[I 2021-05-07 15:48:43,434] Trial 2119 pruned.
[I 2021-05-07 15:48:43,864] Trial 2120 pruned.
[I 2021-05-07 15:48:44,079] Trial 2121 pruned.
[I 2021-05-07 15:48:44,510] Trial 2122 pruned.
[I 2021-05-07 15:48:44,755] Trial 2123 pruned.
[I 2021-05-07 15:48:45,179] Trial 2124 pruned.
[I 2021-05-07 15:48:46,087] Trial 2125 pruned.
[I 2021-05-07 15:48:46,192] Trial 2126 pruned.
[I 2021-05-07 15:48:47,002] Trial 2127 pruned.
[I 2021-05-07 15:48:47,797] Trial 2128 pruned.
[I 2021-05-07 15:48:47,966] Trial 2129 pruned.
[I 2021-05-07 15:48:48,215] Trial 2130 pruned.
[I 2021-05-07 15:48:48,638] Trial 2131 pruned.
[I 2021-05-07 15:48:49,075] Trial 2132 pruned.
[I 2021-05-07 15:48:50,361] Trial 2133 pruned.
[I 2021-05-07 15:48:50,791] Trial 2134 pruned.
[I 2021-05-07 15:48:51,048] Trial 2135 pruned.
[I 2021-05-07 15:48:51,488] Trial 2136 pruned.
[I 2021-05-07 15:48:51,638] Trial 2137 pruned.
[I 2021-05-07 15:48:52,507] Trial 2138 pruned.
[I 2021-05-07 15:48:53,331] Trial 2139 pruned.
[I 2021-05-07 15:48:53,796] Trial 2140 pruned.
[I 2021-05-07 15:48:54,035] Trial 2141 pruned.
[I 2021-05-07 15:48:54,463] Trial 2142 pruned.
[I 2021-05-07 15:48:54,689] Trial 2143 pruned.
[I 2021-05-07 15:48:56,006] Trial 2144 pruned.
[I 2021-05-07 15:48:58,102] Trial 2145 pruned.
[I 2021-05-07 15:48:58,348] Trial 2146 pruned.
[I 2021-05-07 15:48:58,959] Trial 2147 pruned.
[I 2021-05-07 15:48:59,795] Trial 2148 pruned.
[I 2021-05-07 15:49:00,260] Trial 2149 pruned.
[I 2021-05-07 15:49:00,410] Trial 2150 pruned.
[I 2021-05-07 15:49:00,870] Trial 2151 pruned.
[I 2021-05-07 15:49:01,116] Trial 2152 pruned.
[I 2021-05-07 15:49:01,575] Trial 2153 pruned.
[I 2021-05-07 15:49:02,403] Trial 2154 pruned.
[I 2021-05-07 15:49:02,852] Trial 2155 pruned.
[I 2021-05-07 15:49:03,280] Trial 2156 pruned.
[I 2021-05-07 15:49:03,434] Trial 2157 pruned.
[I 2021-05-07 15:49:03,546] Trial 2158 pruned.
[I 2021-05-07 15:49:03,792] Trial 2159 pruned.
[I 2021-05-07 15:49:04,260] Trial 2160 pruned.
[I 2021-05-07 15:49:04,749] Trial 2161 pruned.
[I 2021-05-07 15:49:05,180] Trial 2162 pruned.
[I 2021-05-07 15:49:05,609] Trial 2163 pruned.
[I 2021-05-07 15:49:05,851] Trial 2164 pruned.
[I 2021-05-07 15:49:06,278] Trial 2165 pruned.
[I 2021-05-07 15:49:06,430] Trial 2166 pruned.
[I 2021-05-07 15:49:06,894] Trial 2167 pruned.
[I 2021-05-07 15:49:07,714] Trial 2168 pruned.
[I 2021-05-07 15:49:08,146] Trial 2169 pruned.
[I 2021-05-07 15:49:08,409] Trial 2170 pruned.
[I 2021-05-07 15:49:08,885] Trial 2171 pruned.
[I 2021-05-07 15:49:09,041] Trial 2172 pruned.
[I 2021-05-07 15:49:09,470] Trial 2173 pruned.
[I 2021-05-07 15:49:09,921] Trial 2174 pruned.
[I 2021-05-07 15:49:10,169] Trial 2175 pruned.
[I 2021-05-07 15:49:10,635] Trial 2176 pruned.
[I 2021-05-07 15:49:11,072] Trial 2177 pruned.
[I 2021-05-07 15:49:12,248] Trial 2178 pruned.
[I 2021-05-07 15:49:12,415] Trial 2179 pruned.
[I 2021-05-07 15:49:12,845] Trial 2180 pruned.
[I 2021-05-07 15:49:13,088] Trial 2181 pruned.
[I 2021-05-07 15:49:13,559] Trial 2182 pruned.
[I 2021-05-07 15:49:13,988] Trial 2183 pruned.
[I 2021-05-07 15:49:14,427] Trial 2184 pruned.
[I 2021-05-07 15:49:14,531] Trial 2185 pruned.
[I 2021-05-07 15:49:15,395] Trial 2186 pruned.
[I 2021-05-07 15:49:15,560] Trial 2187 pruned.
[I 2021-05-07 15:49:15,808] Trial 2188 pruned.
[I 2021-05-07 15:49:16,246] Trial 2189 pruned.
[I 2021-05-07 15:49:17,154] Trial 2190 pruned.
[I 2021-05-07 15:49:17,615] Trial 2191 pruned.
[I 2021-05-07 15:49:18,050] Trial 2192 pruned.
[I 2021-05-07 15:49:18,301] Trial 2193 pruned.
[I 2021-05-07 15:49:18,725] Trial 2194 pruned.
[I 2021-05-07 15:49:18,873] Trial 2195 pruned.
[I 2021-05-07 15:49:19,302] Trial 2196 pruned.
[I 2021-05-07 15:49:20,191] Trial 2197 pruned.
[I 2021-05-07 15:49:20,657] Trial 2198 pruned.
[I 2021-05-07 15:49:20,915] Trial 2199 pruned.
[I 2021-05-07 15:49:21,347] Trial 2200 pruned.
[I 2021-05-07 15:49:21,558] Trial 2201 pruned.
[I 2021-05-07 15:49:22,000] Trial 2202 pruned.
[I 2021-05-07 15:49:22,458] Trial 2203 pruned.
[I 2021-05-07 15:49:22,703] Trial 2204 pruned.
[I 2021-05-07 15:49:23,136] Trial 2205 pruned.
[I 2021-05-07 15:49:23,612] Trial 2206 pruned.
[I 2021-05-07 15:49:24,407] Trial 2207 pruned.
[I 2021-05-07 15:49:24,560] Trial 2208 pruned.
[I 2021-05-07 15:49:25,366] Trial 2209 pruned.
[I 2021-05-07 15:49:25,608] Trial 2210 pruned.
[I 2021-05-07 15:49:26,082] Trial 2211 pruned.
[I 2021-05-07 15:49:26,554] Trial 2212 pruned.
[I 2021-05-07 15:49:27,376] Trial 2213 pruned.
[I 2021-05-07 15:49:27,806] Trial 2214 pruned.
[I 2021-05-07 15:49:27,959] Trial 2215 pruned.
[I 2021-05-07 15:49:28,077] Trial 2216 pruned.
[I 2021-05-07 15:49:28,330] Trial 2217 pruned.
[I 2021-05-07 15:49:29,105] Trial 2218 pruned.
[I 2021-05-07 15:49:30,000] Trial 2219 pruned.
[I 2021-05-07 15:49:30,700] Trial 2220 pruned.
[I 2021-05-07 15:49:31,177] Trial 2221 pruned.
[I 2021-05-07 15:49:31,431] Trial 2222 pruned.
[I 2021-05-07 15:49:31,862] Trial 2223 pruned.
[I 2021-05-07 15:49:32,027] Trial 2224 pruned.
[I 2021-05-07 15:49:32,438] Trial 2225 pruned.
[I 2021-05-07 15:49:33,277] Trial 2226 pruned.
[I 2021-05-07 15:49:33,706] Trial 2227 pruned.
[I 2021-05-07 15:49:33,951] Trial 2228 pruned.
[I 2021-05-07 15:49:34,853] Trial 2229 pruned.
[I 2021-05-07 15:49:35,018] Trial 2230 pruned.
[I 2021-05-07 15:49:35,454] Trial 2231 pruned.
[I 2021-05-07 15:49:35,887] Trial 2232 pruned.
[I 2021-05-07 15:49:36,151] Trial 2233 pruned.
[I 2021-05-07 15:50:16,782] Trial 2234 finished with value: 163.09848022460938 and parameters: {'lr': 0.001659743807920571, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 928, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:50:17,236] Trial 2235 pruned.
[I 2021-05-07 15:50:17,874] Trial 2236 pruned.
[I 2021-05-07 15:50:18,336] Trial 2237 pruned.
[I 2021-05-07 15:50:18,503] Trial 2238 pruned.
[I 2021-05-07 15:50:18,956] Trial 2239 pruned.
[I 2021-05-07 15:50:19,216] Trial 2240 pruned.
[I 2021-05-07 15:50:19,682] Trial 2241 pruned.
[I 2021-05-07 15:50:20,146] Trial 2242 pruned.
[I 2021-05-07 15:50:20,613] Trial 2243 pruned.
[I 2021-05-07 15:50:20,727] Trial 2244 pruned.
[I 2021-05-07 15:50:21,202] Trial 2245 pruned.
[I 2021-05-07 15:50:21,366] Trial 2246 pruned.
[I 2021-05-07 15:50:21,844] Trial 2247 pruned.
[I 2021-05-07 15:50:22,108] Trial 2248 pruned.
[I 2021-05-07 15:51:02,761] Trial 2249 finished with value: 187.34080505371094 and parameters: {'lr': 0.0017153945263202845, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 926, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:51:03,224] Trial 2250 pruned.
[I 2021-05-07 15:51:03,919] Trial 2251 pruned.
[I 2021-05-07 15:51:04,185] Trial 2252 pruned.
[I 2021-05-07 15:51:04,348] Trial 2253 pruned.
[I 2021-05-07 15:51:04,826] Trial 2254 pruned.
[I 2021-05-07 15:51:05,278] Trial 2255 pruned.
[I 2021-05-07 15:51:05,745] Trial 2256 pruned.
[I 2021-05-07 15:51:06,007] Trial 2257 pruned.
[I 2021-05-07 15:51:06,470] Trial 2258 pruned.
[I 2021-05-07 15:51:06,949] Trial 2259 pruned.
[I 2021-05-07 15:51:07,114] Trial 2260 pruned.
[I 2021-05-07 15:51:07,592] Trial 2261 pruned.
[I 2021-05-07 15:51:08,039] Trial 2262 pruned.
[I 2021-05-07 15:51:08,310] Trial 2263 pruned.
[I 2021-05-07 15:51:08,767] Trial 2264 pruned.
[I 2021-05-07 15:51:09,244] Trial 2265 pruned.
[I 2021-05-07 15:51:09,697] Trial 2266 pruned.
[I 2021-05-07 15:51:09,863] Trial 2267 pruned.
[I 2021-05-07 15:51:10,335] Trial 2268 pruned.
[I 2021-05-07 15:51:10,603] Trial 2269 pruned.
[I 2021-05-07 15:51:11,082] Trial 2270 pruned.
[I 2021-05-07 15:51:11,716] Trial 2271 pruned.
[I 2021-05-07 15:51:12,584] Trial 2272 pruned.
[I 2021-05-07 15:51:12,701] Trial 2273 pruned.
[I 2021-05-07 15:51:13,193] Trial 2274 pruned.
[I 2021-05-07 15:51:13,360] Trial 2275 pruned.
[I 2021-05-07 15:51:13,613] Trial 2276 pruned.
[I 2021-05-07 15:51:14,087] Trial 2277 pruned.
[I 2021-05-07 15:51:14,534] Trial 2278 pruned.
[I 2021-05-07 15:51:15,014] Trial 2279 pruned.
[I 2021-05-07 15:51:15,491] Trial 2280 pruned.
[I 2021-05-07 15:51:15,759] Trial 2281 pruned.
[I 2021-05-07 15:51:16,222] Trial 2282 pruned.
[I 2021-05-07 15:51:16,387] Trial 2283 pruned.
[I 2021-05-07 15:51:16,849] Trial 2284 pruned.
[I 2021-05-07 15:51:17,326] Trial 2285 pruned.
[I 2021-05-07 15:51:17,578] Trial 2286 pruned.
[I 2021-05-07 15:51:18,002] Trial 2287 pruned.
[I 2021-05-07 15:51:18,452] Trial 2288 pruned.
[I 2021-05-07 15:51:18,610] Trial 2289 pruned.
[I 2021-05-07 15:51:19,083] Trial 2290 pruned.
[I 2021-05-07 15:51:19,548] Trial 2291 pruned.
[I 2021-05-07 15:51:41,581] Trial 2292 finished with value: 166.859375 and parameters: {'lr': 0.003824337419755899, 'batch_size': 32, 'n_layers': 2, 'neurons_HL1': 1012, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:51:42,007] Trial 2293 pruned.
[I 2021-05-07 15:51:42,457] Trial 2294 pruned.
[I 2021-05-07 15:51:42,990] Trial 2295 pruned.
[I 2021-05-07 15:51:43,158] Trial 2296 pruned.
[I 2021-05-07 15:51:43,601] Trial 2297 pruned.
[I 2021-05-07 15:51:43,840] Trial 2298 pruned.
[I 2021-05-07 15:51:44,314] Trial 2299 pruned.
[I 2021-05-07 15:51:45,099] Trial 2300 pruned.
[I 2021-05-07 15:51:45,567] Trial 2301 pruned.
[I 2021-05-07 15:51:46,041] Trial 2302 pruned.
[I 2021-05-07 15:51:46,160] Trial 2303 pruned.
[I 2021-05-07 15:51:46,320] Trial 2304 pruned.
[I 2021-05-07 15:51:46,581] Trial 2305 pruned.
[I 2021-05-07 15:51:47,408] Trial 2306 pruned.
[I 2021-05-07 15:51:48,278] Trial 2307 pruned.
[I 2021-05-07 15:51:48,708] Trial 2308 pruned.
[I 2021-05-07 15:51:49,163] Trial 2309 pruned.
[I 2021-05-07 15:51:49,413] Trial 2310 pruned.
[I 2021-05-07 15:51:50,199] Trial 2311 pruned.
[I 2021-05-07 15:51:50,367] Trial 2312 pruned.
[I 2021-05-07 15:51:50,810] Trial 2313 pruned.
[I 2021-05-07 15:51:51,287] Trial 2314 pruned.
[I 2021-05-07 15:51:51,535] Trial 2315 pruned.
[I 2021-05-07 15:51:51,958] Trial 2316 pruned.
[I 2021-05-07 15:51:52,440] Trial 2317 pruned.
[I 2021-05-07 15:51:52,594] Trial 2318 pruned.
[I 2021-05-07 15:51:53,034] Trial 2319 pruned.
[I 2021-05-07 15:51:53,897] Trial 2320 pruned.
[I 2021-05-07 15:51:54,142] Trial 2321 pruned.
[I 2021-05-07 15:51:54,598] Trial 2322 pruned.
[I 2021-05-07 15:51:55,062] Trial 2323 pruned.
[I 2021-05-07 15:51:56,387] Trial 2324 pruned.
[I 2021-05-07 15:51:56,559] Trial 2325 pruned.
[I 2021-05-07 15:51:57,003] Trial 2326 pruned.
[I 2021-05-07 15:51:57,261] Trial 2327 pruned.
[I 2021-05-07 15:51:57,898] Trial 2328 pruned.
[I 2021-05-07 15:51:58,328] Trial 2329 pruned.
[I 2021-05-07 15:51:59,138] Trial 2330 pruned.
[I 2021-05-07 15:51:59,306] Trial 2331 pruned.
[I 2021-05-07 15:51:59,853] Trial 2332 pruned.
[I 2021-05-07 15:52:00,021] Trial 2333 pruned.
[I 2021-05-07 15:52:00,270] Trial 2334 pruned.
[I 2021-05-07 15:52:01,138] Trial 2335 pruned.
[I 2021-05-07 15:52:01,574] Trial 2336 pruned.
[I 2021-05-07 15:52:02,062] Trial 2337 pruned.
[I 2021-05-07 15:52:02,509] Trial 2338 pruned.
[I 2021-05-07 15:52:02,775] Trial 2339 pruned.
[I 2021-05-07 15:52:03,221] Trial 2340 pruned.
[I 2021-05-07 15:52:03,395] Trial 2341 pruned.
[I 2021-05-07 15:52:03,872] Trial 2342 pruned.
[I 2021-05-07 15:52:04,666] Trial 2343 pruned.
[I 2021-05-07 15:52:04,918] Trial 2344 pruned.
[I 2021-05-07 15:52:06,234] Trial 2345 pruned.
[I 2021-05-07 15:52:06,704] Trial 2346 pruned.
[I 2021-05-07 15:52:06,874] Trial 2347 pruned.
[I 2021-05-07 15:52:07,348] Trial 2348 pruned.
[I 2021-05-07 15:52:07,790] Trial 2349 pruned.
[I 2021-05-07 15:52:08,051] Trial 2350 pruned.
[I 2021-05-07 15:52:08,876] Trial 2351 pruned.
[I 2021-05-07 15:52:09,503] Trial 2352 pruned.
[I 2021-05-07 15:52:09,977] Trial 2353 pruned.
[I 2021-05-07 15:52:10,142] Trial 2354 pruned.
[I 2021-05-07 15:52:10,593] Trial 2355 pruned.
[I 2021-05-07 15:52:10,855] Trial 2356 pruned.
[I 2021-05-07 15:52:11,289] Trial 2357 pruned.
[I 2021-05-07 15:52:12,207] Trial 2358 pruned.
[I 2021-05-07 15:52:12,719] Trial 2359 pruned.
[I 2021-05-07 15:52:12,878] Trial 2360 pruned.
[I 2021-05-07 15:52:13,357] Trial 2361 pruned.
[I 2021-05-07 15:52:13,520] Trial 2362 pruned.
[I 2021-05-07 15:52:13,781] Trial 2363 pruned.
[I 2021-05-07 15:52:14,261] Trial 2364 pruned.
[I 2021-05-07 15:52:14,740] Trial 2365 pruned.
[I 2021-05-07 15:52:15,173] Trial 2366 pruned.
[I 2021-05-07 15:52:16,007] Trial 2367 pruned.
[I 2021-05-07 15:52:16,275] Trial 2368 pruned.
[I 2021-05-07 15:52:16,725] Trial 2369 pruned.
[I 2021-05-07 15:52:16,897] Trial 2370 pruned.
[I 2021-05-07 15:52:17,798] Trial 2371 pruned.
[I 2021-05-07 15:52:18,679] Trial 2372 pruned.
[I 2021-05-07 15:52:19,121] Trial 2373 pruned.
[I 2021-05-07 15:52:19,376] Trial 2374 pruned.
[I 2021-05-07 15:52:19,806] Trial 2375 pruned.
[I 2021-05-07 15:52:19,975] Trial 2376 pruned.
[I 2021-05-07 15:52:20,401] Trial 2377 pruned.
[I 2021-05-07 15:52:21,231] Trial 2378 pruned.
[I 2021-05-07 15:52:21,480] Trial 2379 pruned.
[I 2021-05-07 15:52:21,921] Trial 2380 pruned.
[I 2021-05-07 15:52:22,717] Trial 2381 pruned.
[I 2021-05-07 15:52:23,166] Trial 2382 pruned.
[I 2021-05-07 15:52:23,361] Trial 2383 pruned.
[I 2021-05-07 15:52:23,840] Trial 2384 pruned.
[I 2021-05-07 15:52:24,096] Trial 2385 pruned.
[I 2021-05-07 15:52:24,566] Trial 2386 pruned.
[I 2021-05-07 15:52:25,506] Trial 2387 pruned.
[I 2021-05-07 15:52:25,940] Trial 2388 pruned.
[I 2021-05-07 15:52:26,055] Trial 2389 pruned.
[I 2021-05-07 15:52:26,520] Trial 2390 pruned.
[I 2021-05-07 15:52:26,690] Trial 2391 pruned.
[I 2021-05-07 15:52:26,936] Trial 2392 pruned.
[I 2021-05-07 15:52:27,824] Trial 2393 pruned.
[I 2021-05-07 15:52:28,265] Trial 2394 pruned.
[I 2021-05-07 15:52:28,680] Trial 2395 pruned.
[I 2021-05-07 15:52:29,124] Trial 2396 pruned.
[I 2021-05-07 15:52:29,475] Trial 2397 pruned.
[I 2021-05-07 15:52:29,956] Trial 2398 pruned.
[I 2021-05-07 15:52:30,113] Trial 2399 pruned.
[I 2021-05-07 15:52:30,578] Trial 2400 pruned.
[I 2021-05-07 15:52:31,433] Trial 2401 pruned.
[I 2021-05-07 15:52:31,955] Trial 2402 pruned.
[I 2021-05-07 15:52:32,242] Trial 2403 pruned.
[I 2021-05-07 15:52:41,980] Trial 2404 pruned.
[I 2021-05-07 15:52:42,150] Trial 2405 pruned.
[I 2021-05-07 15:52:42,985] Trial 2406 pruned.
[I 2021-05-07 15:52:43,410] Trial 2407 pruned.
[I 2021-05-07 15:52:43,651] Trial 2408 pruned.
[I 2021-05-07 15:52:44,088] Trial 2409 pruned.
[I 2021-05-07 15:52:44,503] Trial 2410 pruned.
[I 2021-05-07 15:52:44,956] Trial 2411 pruned.
[I 2021-05-07 15:52:45,124] Trial 2412 pruned.
[I 2021-05-07 15:52:45,535] Trial 2413 pruned.
[I 2021-05-07 15:52:45,997] Trial 2414 pruned.
[I 2021-05-07 15:52:46,412] Trial 2415 pruned.
[I 2021-05-07 15:52:46,828] Trial 2416 pruned.
[I 2021-05-07 15:52:47,294] Trial 2417 pruned.
[I 2021-05-07 15:52:47,409] Trial 2418 pruned.
[I 2021-05-07 15:52:47,917] Trial 2419 pruned.
[I 2021-05-07 15:52:48,068] Trial 2420 pruned.
[I 2021-05-07 15:52:48,325] Trial 2421 pruned.
[I 2021-05-07 15:52:48,727] Trial 2422 pruned.
[I 2021-05-07 15:52:49,163] Trial 2423 pruned.
[I 2021-05-07 15:52:49,618] Trial 2424 pruned.
[I 2021-05-07 15:52:50,517] Trial 2425 pruned.
[I 2021-05-07 15:52:50,768] Trial 2426 pruned.
[I 2021-05-07 15:52:51,653] Trial 2427 pruned.
[I 2021-05-07 15:52:51,828] Trial 2428 pruned.
[I 2021-05-07 15:52:52,616] Trial 2429 pruned.
[I 2021-05-07 15:52:53,356] Trial 2430 pruned.
[I 2021-05-07 15:52:53,862] Trial 2431 pruned.
[I 2021-05-07 15:52:54,154] Trial 2432 pruned.
[I 2021-05-07 15:52:54,678] Trial 2433 pruned.
[I 2021-05-07 15:52:54,852] Trial 2434 pruned.
[I 2021-05-07 15:52:55,387] Trial 2435 pruned.
[I 2021-05-07 15:52:56,312] Trial 2436 pruned.
[I 2021-05-07 15:52:56,594] Trial 2437 pruned.
[I 2021-05-07 15:52:57,578] Trial 2438 pruned.
[I 2021-05-07 15:52:58,054] Trial 2439 pruned.
[I 2021-05-07 15:52:58,526] Trial 2440 pruned.
[I 2021-05-07 15:52:58,690] Trial 2441 pruned.
[I 2021-05-07 15:52:59,165] Trial 2442 pruned.
[I 2021-05-07 15:52:59,448] Trial 2443 pruned.
[I 2021-05-07 15:52:59,893] Trial 2444 pruned.
[I 2021-05-07 15:53:00,359] Trial 2445 pruned.
[I 2021-05-07 15:53:00,803] Trial 2446 pruned.
[I 2021-05-07 15:53:00,919] Trial 2447 pruned.
[I 2021-05-07 15:53:01,761] Trial 2448 pruned.
[I 2021-05-07 15:53:01,942] Trial 2449 pruned.
[I 2021-05-07 15:53:02,225] Trial 2450 pruned.
[I 2021-05-07 15:53:02,771] Trial 2451 pruned.
[I 2021-05-07 15:53:03,264] Trial 2452 pruned.
[I 2021-05-07 15:53:03,694] Trial 2453 pruned.
[I 2021-05-07 15:53:04,138] Trial 2454 pruned.
[I 2021-05-07 15:53:04,389] Trial 2455 pruned.
[I 2021-05-07 15:53:04,843] Trial 2456 pruned.
[I 2021-05-07 15:53:05,015] Trial 2457 pruned.
[I 2021-05-07 15:53:05,449] Trial 2458 pruned.
[I 2021-05-07 15:53:05,908] Trial 2459 pruned.
[I 2021-05-07 15:53:06,338] Trial 2460 pruned.
[I 2021-05-07 15:53:06,591] Trial 2461 pruned.
[I 2021-05-07 15:53:07,063] Trial 2462 pruned.
[I 2021-05-07 15:53:07,222] Trial 2463 pruned.
[I 2021-05-07 15:53:08,095] Trial 2464 pruned.
[I 2021-05-07 15:53:08,558] Trial 2465 pruned.
[I 2021-05-07 15:53:08,842] Trial 2466 pruned.
[I 2021-05-07 15:53:09,341] Trial 2467 pruned.
[I 2021-05-07 15:53:10,186] Trial 2468 pruned.
[I 2021-05-07 15:53:11,018] Trial 2469 pruned.
[I 2021-05-07 15:53:11,194] Trial 2470 pruned.
[I 2021-05-07 15:53:11,639] Trial 2471 pruned.
[I 2021-05-07 15:53:11,898] Trial 2472 pruned.
[I 2021-05-07 15:53:12,382] Trial 2473 pruned.
[I 2021-05-07 15:53:12,818] Trial 2474 pruned.
[I 2021-05-07 15:53:13,660] Trial 2475 pruned.
[I 2021-05-07 15:53:14,120] Trial 2476 pruned.
[I 2021-05-07 15:53:14,277] Trial 2477 pruned.
[I 2021-05-07 15:53:14,387] Trial 2478 pruned.
[I 2021-05-07 15:53:14,874] Trial 2479 pruned.
[I 2021-05-07 15:53:15,671] Trial 2480 pruned.
[I 2021-05-07 15:53:16,493] Trial 2481 pruned.
[I 2021-05-07 15:53:17,487] Trial 2482 pruned.
[I 2021-05-07 15:53:18,326] Trial 2483 pruned.
[I 2021-05-07 15:53:18,679] Trial 2484 pruned.
[I 2021-05-07 15:53:19,550] Trial 2485 pruned.
[I 2021-05-07 15:53:19,728] Trial 2486 pruned.
[I 2021-05-07 15:53:20,145] Trial 2487 pruned.
[I 2021-05-07 15:53:20,852] Trial 2488 pruned.
[I 2021-05-07 15:53:21,298] Trial 2489 pruned.
[I 2021-05-07 15:53:21,553] Trial 2490 pruned.
[I 2021-05-07 15:53:22,014] Trial 2491 pruned.
[I 2021-05-07 15:53:22,172] Trial 2492 pruned.
[I 2021-05-07 15:53:23,084] Trial 2493 pruned.
[I 2021-05-07 15:53:23,564] Trial 2494 pruned.
[I 2021-05-07 15:53:23,809] Trial 2495 pruned.
[I 2021-05-07 15:53:24,245] Trial 2496 pruned.
[I 2021-05-07 15:53:24,819] Trial 2497 pruned.
[I 2021-05-07 15:53:25,268] Trial 2498 pruned.
[I 2021-05-07 15:53:25,463] Trial 2499 pruned.
[I 2021-05-07 15:53:25,960] Trial 2500 pruned.
[I 2021-05-07 15:53:26,229] Trial 2501 pruned.
[I 2021-05-07 15:53:26,699] Trial 2502 pruned.
[I 2021-05-07 15:53:27,146] Trial 2503 pruned.
[I 2021-05-07 15:53:27,607] Trial 2504 pruned.
[I 2021-05-07 15:53:28,102] Trial 2505 pruned.
[I 2021-05-07 15:53:28,229] Trial 2506 pruned.
[I 2021-05-07 15:53:28,393] Trial 2507 pruned.
[I 2021-05-07 15:53:28,673] Trial 2508 pruned.
[I 2021-05-07 15:53:29,491] Trial 2509 pruned.
[I 2021-05-07 15:53:29,925] Trial 2510 pruned.
[I 2021-05-07 15:53:30,371] Trial 2511 pruned.
[I 2021-05-07 15:53:30,839] Trial 2512 pruned.
[I 2021-05-07 15:53:31,107] Trial 2513 pruned.
[I 2021-05-07 15:53:31,546] Trial 2514 pruned.
[I 2021-05-07 15:53:31,712] Trial 2515 pruned.
[I 2021-05-07 15:53:32,177] Trial 2516 pruned.
[I 2021-05-07 15:53:32,642] Trial 2517 pruned.
[I 2021-05-07 15:53:33,121] Trial 2518 pruned.
[I 2021-05-07 15:53:33,469] Trial 2519 pruned.
[I 2021-05-07 15:53:33,953] Trial 2520 pruned.
[I 2021-05-07 15:53:34,109] Trial 2521 pruned.
[I 2021-05-07 15:53:34,545] Trial 2522 pruned.
[I 2021-05-07 15:53:34,966] Trial 2523 pruned.
[I 2021-05-07 15:53:35,221] Trial 2524 pruned.
[I 2021-05-07 15:53:35,798] Trial 2525 pruned.
[I 2021-05-07 15:53:36,242] Trial 2526 pruned.
[I 2021-05-07 15:53:36,674] Trial 2527 pruned.
[I 2021-05-07 15:53:36,844] Trial 2528 pruned.
[I 2021-05-07 15:53:37,674] Trial 2529 pruned.
[I 2021-05-07 15:53:37,938] Trial 2530 pruned.
[I 2021-05-07 15:53:38,768] Trial 2531 pruned.
[I 2021-05-07 15:54:16,561] Trial 2532 finished with value: 163.87338256835938 and parameters: {'lr': 0.0028540793763172623, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 682, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:54:17,002] Trial 2533 pruned.
[I 2021-05-07 15:54:17,448] Trial 2534 pruned.
[I 2021-05-07 15:54:17,905] Trial 2535 pruned.
[I 2021-05-07 15:54:18,366] Trial 2536 pruned.
[I 2021-05-07 15:54:19,187] Trial 2537 pruned.
[I 2021-05-07 15:54:19,631] Trial 2538 pruned.
[I 2021-05-07 15:54:20,081] Trial 2539 pruned.
[I 2021-05-07 15:54:20,522] Trial 2540 pruned.
[I 2021-05-07 15:54:20,968] Trial 2541 pruned.
[I 2021-05-07 15:54:21,782] Trial 2542 pruned.
[I 2021-05-07 15:54:22,232] Trial 2543 pruned.
[I 2021-05-07 15:54:23,047] Trial 2544 pruned.
[I 2021-05-07 15:54:23,890] Trial 2545 pruned.
[I 2021-05-07 15:54:24,334] Trial 2546 pruned.
[I 2021-05-07 15:54:25,152] Trial 2547 pruned.
[I 2021-05-07 15:54:25,602] Trial 2548 pruned.
[I 2021-05-07 15:54:26,041] Trial 2549 pruned.
[I 2021-05-07 15:54:26,489] Trial 2550 pruned.
[I 2021-05-07 15:54:26,927] Trial 2551 pruned.
[I 2021-05-07 15:54:27,365] Trial 2552 pruned.
[I 2021-05-07 15:54:31,964] Trial 2553 pruned.
[I 2021-05-07 15:54:32,427] Trial 2554 pruned.
[I 2021-05-07 15:54:32,868] Trial 2555 pruned.
[I 2021-05-07 15:54:33,309] Trial 2556 pruned.
[I 2021-05-07 15:54:34,177] Trial 2557 pruned.
[I 2021-05-07 15:54:34,658] Trial 2558 pruned.
[I 2021-05-07 15:54:35,546] Trial 2559 pruned.
[I 2021-05-07 15:54:37,019] Trial 2560 pruned.
[I 2021-05-07 15:54:37,897] Trial 2561 pruned.
[I 2021-05-07 15:54:38,366] Trial 2562 pruned.
[I 2021-05-07 15:54:39,431] Trial 2563 pruned.
[I 2021-05-07 15:54:40,667] Trial 2564 pruned.
[I 2021-05-07 15:54:41,117] Trial 2565 pruned.
[I 2021-05-07 15:54:41,765] Trial 2566 pruned.
[I 2021-05-07 15:54:42,958] Trial 2567 pruned.
[I 2021-05-07 15:54:43,801] Trial 2568 pruned.
[I 2021-05-07 15:54:44,239] Trial 2569 pruned.
[I 2021-05-07 15:54:45,078] Trial 2570 pruned.
[I 2021-05-07 15:54:45,515] Trial 2571 pruned.
[I 2021-05-07 15:54:45,962] Trial 2572 pruned.
[I 2021-05-07 15:54:46,401] Trial 2573 pruned.
[I 2021-05-07 15:54:46,841] Trial 2574 pruned.
[I 2021-05-07 15:54:47,336] Trial 2575 pruned.
[I 2021-05-07 15:54:49,838] Trial 2576 pruned.
[I 2021-05-07 15:54:50,272] Trial 2577 pruned.
[I 2021-05-07 15:54:51,448] Trial 2578 pruned.
[I 2021-05-07 15:54:51,612] Trial 2579 pruned.
[I 2021-05-07 15:54:52,079] Trial 2580 pruned.
[I 2021-05-07 15:54:52,731] Trial 2581 pruned.
[I 2021-05-07 15:54:52,866] Trial 2582 pruned.
[I 2021-05-07 15:54:53,319] Trial 2583 pruned.
[I 2021-05-07 15:54:54,181] Trial 2584 pruned.
[I 2021-05-07 15:54:54,741] Trial 2585 pruned.
[I 2021-05-07 15:54:54,917] Trial 2586 pruned.
[I 2021-05-07 15:54:55,805] Trial 2587 pruned.
[I 2021-05-07 15:54:56,293] Trial 2588 pruned.
[I 2021-05-07 15:54:56,774] Trial 2589 pruned.
[I 2021-05-07 15:54:57,218] Trial 2590 pruned.
[I 2021-05-07 15:54:58,120] Trial 2591 pruned.
[I 2021-05-07 15:54:58,279] Trial 2592 pruned.
[I 2021-05-07 15:54:58,733] Trial 2593 pruned.
[I 2021-05-07 15:54:59,545] Trial 2594 pruned.
[I 2021-05-07 15:55:00,031] Trial 2595 pruned.
[I 2021-05-07 15:55:00,498] Trial 2596 pruned.
[I 2021-05-07 15:55:00,973] Trial 2597 pruned.
[I 2021-05-07 15:55:01,138] Trial 2598 pruned.
[I 2021-05-07 15:55:01,584] Trial 2599 pruned.
[I 2021-05-07 15:55:02,395] Trial 2600 pruned.
[I 2021-05-07 15:55:03,086] Trial 2601 pruned.
[I 2021-05-07 15:55:03,571] Trial 2602 pruned.
[I 2021-05-07 15:55:04,463] Trial 2603 pruned.
[I 2021-05-07 15:55:04,624] Trial 2604 pruned.
[I 2021-05-07 15:55:04,745] Trial 2605 pruned.
[I 2021-05-07 15:55:05,190] Trial 2606 pruned.
[I 2021-05-07 15:55:06,025] Trial 2607 pruned.
[I 2021-05-07 15:55:06,499] Trial 2608 pruned.
[I 2021-05-07 15:55:07,306] Trial 2609 pruned.
[I 2021-05-07 15:55:08,489] Trial 2610 pruned.
[I 2021-05-07 15:55:08,666] Trial 2611 pruned.
[I 2021-05-07 15:55:09,855] Trial 2612 pruned.
[I 2021-05-07 15:55:10,319] Trial 2613 pruned.
[I 2021-05-07 15:55:10,757] Trial 2614 pruned.
[I 2021-05-07 15:55:11,246] Trial 2615 pruned.
[I 2021-05-07 15:55:11,724] Trial 2616 pruned.
[I 2021-05-07 15:55:12,204] Trial 2617 pruned.
[I 2021-05-07 15:55:12,376] Trial 2618 pruned.
[I 2021-05-07 15:55:12,957] Trial 2619 pruned.
[I 2021-05-07 15:55:13,401] Trial 2620 pruned.
[I 2021-05-07 15:55:14,206] Trial 2621 pruned.
[I 2021-05-07 15:55:14,650] Trial 2622 pruned.
[I 2021-05-07 15:55:14,815] Trial 2623 pruned.
[I 2021-05-07 15:55:15,657] Trial 2624 pruned.
[I 2021-05-07 15:55:16,467] Trial 2625 pruned.
[I 2021-05-07 15:55:16,919] Trial 2626 pruned.
[I 2021-05-07 15:55:17,727] Trial 2627 pruned.
[I 2021-05-07 15:55:18,209] Trial 2628 pruned.
[I 2021-05-07 15:55:18,372] Trial 2629 pruned.
[I 2021-05-07 15:55:18,495] Trial 2630 pruned.
[I 2021-05-07 15:55:18,959] Trial 2631 pruned.
[I 2021-05-07 15:55:19,601] Trial 2632 pruned.
[I 2021-05-07 15:55:20,049] Trial 2633 pruned.
[I 2021-05-07 15:55:20,524] Trial 2634 pruned.
[I 2021-05-07 15:55:20,968] Trial 2635 pruned.
[I 2021-05-07 15:55:21,128] Trial 2636 pruned.
[I 2021-05-07 15:55:21,564] Trial 2637 pruned.
[I 2021-05-07 15:55:22,844] Trial 2638 pruned.
[I 2021-05-07 15:55:23,281] Trial 2639 pruned.
[I 2021-05-07 15:55:23,724] Trial 2640 pruned.
[I 2021-05-07 15:55:24,194] Trial 2641 pruned.
[I 2021-05-07 15:55:24,375] Trial 2642 pruned.
[I 2021-05-07 15:55:24,842] Trial 2643 pruned.
[I 2021-05-07 15:55:25,862] Trial 2644 pruned.
[I 2021-05-07 15:55:26,671] Trial 2645 pruned.
[I 2021-05-07 15:55:27,484] Trial 2646 pruned.
[I 2021-05-07 15:55:28,335] Trial 2647 pruned.
[I 2021-05-07 15:55:28,496] Trial 2648 pruned.
[I 2021-05-07 15:55:29,846] Trial 2649 pruned.
[I 2021-05-07 15:55:30,305] Trial 2650 pruned.
[I 2021-05-07 15:55:31,134] Trial 2651 pruned.
[I 2021-05-07 15:55:31,576] Trial 2652 pruned.
[I 2021-05-07 15:55:32,057] Trial 2653 pruned.
[I 2021-05-07 15:55:32,218] Trial 2654 pruned.
[I 2021-05-07 15:55:32,681] Trial 2655 pruned.
[I 2021-05-07 15:55:32,805] Trial 2656 pruned.
[I 2021-05-07 15:55:33,618] Trial 2657 pruned.
[I 2021-05-07 15:55:34,095] Trial 2658 pruned.
[I 2021-05-07 15:55:34,538] Trial 2659 pruned.
[I 2021-05-07 15:55:34,985] Trial 2660 pruned.
[I 2021-05-07 15:55:35,688] Trial 2661 pruned.
[I 2021-05-07 15:55:35,850] Trial 2662 pruned.
[I 2021-05-07 15:55:36,319] Trial 2663 pruned.
[I 2021-05-07 15:55:37,224] Trial 2664 pruned.
[I 2021-05-07 15:55:37,709] Trial 2665 pruned.
[I 2021-05-07 15:55:38,567] Trial 2666 pruned.
[I 2021-05-07 15:55:38,730] Trial 2667 pruned.
[I 2021-05-07 15:55:39,166] Trial 2668 pruned.
[I 2021-05-07 15:55:39,616] Trial 2669 pruned.
[I 2021-05-07 15:55:40,181] Trial 2670 pruned.
[I 2021-05-07 15:55:40,660] Trial 2671 pruned.
[I 2021-05-07 15:55:41,150] Trial 2672 pruned.
[I 2021-05-07 15:55:41,336] Trial 2673 pruned.
[I 2021-05-07 15:55:41,840] Trial 2674 pruned.
[I 2021-05-07 15:55:42,820] Trial 2675 pruned.
[I 2021-05-07 15:55:43,356] Trial 2676 pruned.
[I 2021-05-07 15:56:25,481] Trial 2677 finished with value: 162.28981018066406 and parameters: {'lr': 0.0029617795637011217, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 952, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:56:25,948] Trial 2678 pruned.
[I 2021-05-07 15:56:26,416] Trial 2679 pruned.
[I 2021-05-07 15:56:26,588] Trial 2680 pruned.
[I 2021-05-07 15:56:26,713] Trial 2681 pruned.
[I 2021-05-07 15:56:27,576] Trial 2682 pruned.
[I 2021-05-07 15:56:28,474] Trial 2683 pruned.
[I 2021-05-07 15:56:28,937] Trial 2684 pruned.
[I 2021-05-07 15:56:29,405] Trial 2685 pruned.
[I 2021-05-07 15:56:29,864] Trial 2686 pruned.
[I 2021-05-07 15:56:30,044] Trial 2687 pruned.
[I 2021-05-07 15:56:30,520] Trial 2688 pruned.
[I 2021-05-07 15:56:31,151] Trial 2689 pruned.
[I 2021-05-07 15:56:31,632] Trial 2690 pruned.
[I 2021-05-07 15:56:32,135] Trial 2691 pruned.
[I 2021-05-07 15:56:32,608] Trial 2692 pruned.
[I 2021-05-07 15:56:32,787] Trial 2693 pruned.
[I 2021-05-07 15:56:33,258] Trial 2694 pruned.
[I 2021-05-07 15:56:33,720] Trial 2695 pruned.
[I 2021-05-07 15:56:34,190] Trial 2696 pruned.
[I 2021-05-07 15:56:34,672] Trial 2697 pruned.
[I 2021-05-07 15:56:35,135] Trial 2698 pruned.
[I 2021-05-07 15:56:35,307] Trial 2699 pruned.
[I 2021-05-07 15:56:35,773] Trial 2700 pruned.
[I 2021-05-07 15:56:36,340] Trial 2701 pruned.
[I 2021-05-07 15:56:36,764] Trial 2702 pruned.
[I 2021-05-07 15:56:37,240] Trial 2703 pruned.
[I 2021-05-07 15:56:37,686] Trial 2704 pruned.
[I 2021-05-07 15:56:37,881] Trial 2705 pruned.
[I 2021-05-07 15:56:37,997] Trial 2706 pruned.
[I 2021-05-07 15:56:38,427] Trial 2707 pruned.
[I 2021-05-07 15:56:38,880] Trial 2708 pruned.
[I 2021-05-07 15:56:39,343] Trial 2709 pruned.
[I 2021-05-07 15:56:42,168] Trial 2710 pruned.
[I 2021-05-07 15:56:42,695] Trial 2711 pruned.
[I 2021-05-07 15:56:42,873] Trial 2712 pruned.
[I 2021-05-07 15:56:43,333] Trial 2713 pruned.
[I 2021-05-07 15:56:43,833] Trial 2714 pruned.
[I 2021-05-07 15:56:44,545] Trial 2715 pruned.
[I 2021-05-07 15:56:45,049] Trial 2716 pruned.
[I 2021-05-07 15:56:45,580] Trial 2717 pruned.
[I 2021-05-07 15:56:45,747] Trial 2718 pruned.
[I 2021-05-07 15:56:46,215] Trial 2719 pruned.
[I 2021-05-07 15:56:46,721] Trial 2720 pruned.
[I 2021-05-07 15:56:47,679] Trial 2721 pruned.
[I 2021-05-07 15:56:48,146] Trial 2722 pruned.
[I 2021-05-07 15:56:48,979] Trial 2723 pruned.
[I 2021-05-07 15:56:49,165] Trial 2724 pruned.
[I 2021-05-07 15:56:49,617] Trial 2725 pruned.
[I 2021-05-07 15:56:50,070] Trial 2726 pruned.
[I 2021-05-07 15:56:50,970] Trial 2727 pruned.
[I 2021-05-07 15:56:51,782] Trial 2728 pruned.
[I 2021-05-07 15:56:53,195] Trial 2729 pruned.
[I 2021-05-07 15:56:53,379] Trial 2730 pruned.
[I 2021-05-07 15:56:53,508] Trial 2731 pruned.
[I 2021-05-07 15:56:53,963] Trial 2732 pruned.
[I 2021-05-07 15:56:54,415] Trial 2733 pruned.
[I 2021-05-07 15:56:54,869] Trial 2734 pruned.
[I 2021-05-07 15:56:55,415] Trial 2735 pruned.
[I 2021-05-07 15:56:55,862] Trial 2736 pruned.
[I 2021-05-07 15:56:56,031] Trial 2737 pruned.
[I 2021-05-07 15:56:56,510] Trial 2738 pruned.
[I 2021-05-07 15:56:56,957] Trial 2739 pruned.
[I 2021-05-07 15:56:57,843] Trial 2740 pruned.
[I 2021-05-07 15:56:58,274] Trial 2741 pruned.
[I 2021-05-07 15:56:58,715] Trial 2742 pruned.
[I 2021-05-07 15:56:58,893] Trial 2743 pruned.
[I 2021-05-07 15:56:59,336] Trial 2744 pruned.
[I 2021-05-07 15:56:59,812] Trial 2745 pruned.
[I 2021-05-07 15:57:00,721] Trial 2746 pruned.
[I 2021-05-07 15:57:01,161] Trial 2747 pruned.
[I 2021-05-07 15:57:01,598] Trial 2748 pruned.
[I 2021-05-07 15:57:01,762] Trial 2749 pruned.
[I 2021-05-07 15:57:02,244] Trial 2750 pruned.
[I 2021-05-07 15:57:02,684] Trial 2751 pruned.
[I 2021-05-07 15:57:03,495] Trial 2752 pruned.
[I 2021-05-07 15:57:03,970] Trial 2753 pruned.
[I 2021-05-07 15:57:05,126] Trial 2754 pruned.
[I 2021-05-07 15:57:05,302] Trial 2755 pruned.
[I 2021-05-07 15:57:05,420] Trial 2756 pruned.
[I 2021-05-07 15:57:05,861] Trial 2757 pruned.
[I 2021-05-07 15:57:06,345] Trial 2758 pruned.
[I 2021-05-07 15:57:06,824] Trial 2759 pruned.
[I 2021-05-07 15:57:07,265] Trial 2760 pruned.
[I 2021-05-07 15:57:07,704] Trial 2761 pruned.
[I 2021-05-07 15:57:07,866] Trial 2762 pruned.
[I 2021-05-07 15:57:10,776] Trial 2763 pruned.
[I 2021-05-07 15:57:11,219] Trial 2764 pruned.
[I 2021-05-07 15:57:12,032] Trial 2765 pruned.
[I 2021-05-07 15:57:13,226] Trial 2766 pruned.
[I 2021-05-07 15:57:13,802] Trial 2767 pruned.
[I 2021-05-07 15:57:14,245] Trial 2768 pruned.
[I 2021-05-07 15:57:14,426] Trial 2769 pruned.
[I 2021-05-07 15:57:14,897] Trial 2770 pruned.
[I 2021-05-07 15:57:15,383] Trial 2771 pruned.
[I 2021-05-07 15:57:16,090] Trial 2772 pruned.
[I 2021-05-07 15:57:16,531] Trial 2773 pruned.
[I 2021-05-07 15:57:16,694] Trial 2774 pruned.
[I 2021-05-07 15:57:17,176] Trial 2775 pruned.
[I 2021-05-07 15:57:17,616] Trial 2776 pruned.
[I 2021-05-07 15:57:18,058] Trial 2777 pruned.
[I 2021-05-07 15:57:18,542] Trial 2778 pruned.
[I 2021-05-07 15:58:00,482] Trial 2779 finished with value: 164.32666015625 and parameters: {'lr': 0.002365891840072696, 'batch_size': 16, 'n_layers': 2, 'neurons_HL1': 966, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear'}. Best is trial 82 with value: 158.939208984375.
[I 2021-05-07 15:58:00,647] Trial 2780 pruned.
[I 2021-05-07 15:58:00,765] Trial 2781 pruned.
[I 2021-05-07 15:58:01,209] Trial 2782 pruned.
[I 2021-05-07 15:58:01,644] Trial 2783 pruned.
[I 2021-05-07 15:58:02,156] Trial 2784 pruned.
[I 2021-05-07 15:58:02,652] Trial 2785 pruned.
[I 2021-05-07 15:58:03,104] Trial 2786 pruned.
[I 2021-05-07 15:58:03,280] Trial 2787 pruned.
[I 2021-05-07 15:58:03,742] Trial 2788 pruned.
[I 2021-05-07 15:58:04,194] Trial 2789 pruned.
[I 2021-05-07 15:58:04,689] Trial 2790 pruned.
[I 2021-05-07 15:58:05,528] Trial 2791 pruned.
[I 2021-05-07 15:58:05,986] Trial 2792 pruned.
[I 2021-05-07 15:58:06,160] Trial 2793 pruned.
[I 2021-05-07 15:58:06,604] Trial 2794 pruned.
[I 2021-05-07 15:58:07,703] Trial 2795 pruned.
[I 2021-05-07 15:58:08,887] Trial 2796 pruned.
[I 2021-05-07 15:58:09,765] Trial 2797 pruned.
[I 2021-05-07 15:58:10,217] Trial 2798 pruned.
[I 2021-05-07 15:58:10,398] Trial 2799 pruned.
[I 2021-05-07 15:58:10,848] Trial 2800 pruned.
[I 2021-05-07 15:58:11,285] Trial 2801 pruned.
[I 2021-05-07 15:58:11,725] Trial 2802 pruned.
[I 2021-05-07 15:58:12,208] Trial 2803 pruned.
[I 2021-05-07 15:58:12,649] Trial 2804 pruned.
[I 2021-05-07 15:58:12,831] Trial 2805 pruned.
[I 2021-05-07 15:58:13,271] Trial 2806 pruned.
[I 2021-05-07 15:58:13,710] Trial 2807 pruned.
[I 2021-05-07 15:58:14,163] Trial 2808 pruned.
[I 2021-05-07 15:58:14,283] Trial 2809 pruned.
[I 2021-05-07 15:58:14,782] Trial 2810 pruned.
[I 2021-05-07 15:58:15,251] Trial 2811 pruned.
[I 2021-05-07 15:58:15,427] Trial 2812 pruned.
[I 2021-05-07 15:58:16,683] Trial 2813 pruned.
[I 2021-05-07 15:58:17,133] Trial 2814 pruned.
[I 2021-05-07 15:58:17,584] Trial 2815 pruned.
[I 2021-05-07 15:58:18,400] Trial 2816 pruned.
[I 2021-05-07 15:58:18,879] Trial 2817 pruned.
[I 2021-05-07 15:58:19,050] Trial 2818 pruned.
[I 2021-05-07 15:58:19,494] Trial 2819 pruned.
[I 2021-05-07 15:58:19,987] Trial 2820 pruned.
[I 2021-05-07 15:58:20,462] Trial 2821 pruned.
[I 2021-05-07 15:58:20,998] Trial 2822 pruned.
[I 2021-05-07 15:58:21,468] Trial 2823 pruned.
[I 2021-05-07 15:58:21,655] Trial 2824 pruned.
[I 2021-05-07 15:58:22,105] Trial 2825 pruned.
[I 2021-05-07 15:58:22,582] Trial 2826 pruned.
[I 2021-05-07 15:58:23,031] Trial 2827 pruned.
[I 2021-05-07 15:58:23,475] Trial 2828 pruned.
[I 2021-05-07 15:58:24,191] Trial 2829 pruned.
[I 2021-05-07 15:58:24,360] Trial 2830 pruned.
[I 2021-05-07 15:58:24,802] Trial 2831 pruned.
[I 2021-05-07 15:58:24,926] Trial 2832 pruned.
[I 2021-05-07 15:58:25,409] Trial 2833 pruned.
[I 2021-05-07 15:58:25,903] Trial 2834 pruned.
[I 2021-05-07 15:58:26,778] Trial 2835 pruned.
[I 2021-05-07 15:58:27,039] Trial 2836 pruned.
[I 2021-05-07 15:58:27,504] Trial 2837 pruned.
[I 2021-05-07 15:58:27,675] Trial 2838 pruned.
[I 2021-05-07 15:58:28,125] Trial 2839 pruned.
[I 2021-05-07 15:58:28,625] Trial 2840 pruned.
[I 2021-05-07 15:58:29,063] Trial 2841 pruned.
[I 2021-05-07 15:58:30,018] Trial 2842 pruned.
[I 2021-05-07 15:58:30,287] Trial 2843 pruned.
[I 2021-05-07 15:58:30,966] Trial 2844 pruned.
[I 2021-05-07 15:58:31,432] Trial 2845 pruned.
[I 2021-05-07 15:58:31,604] Trial 2846 pruned.
[I 2021-05-07 15:58:32,463] Trial 2847 pruned.
[I 2021-05-07 15:58:32,954] Trial 2848 pruned.
[I 2021-05-07 15:58:33,394] Trial 2849 pruned.
[I 2021-05-07 15:58:33,663] Trial 2850 pruned.
[I 2021-05-07 15:58:34,182] Trial 2851 pruned.
[I 2021-05-07 15:58:34,391] Trial 2852 pruned.
[I 2021-05-07 15:58:34,882] Trial 2853 pruned.
[I 2021-05-07 15:58:35,353] Trial 2854 pruned.
[I 2021-05-07 15:58:36,189] Trial 2855 pruned.
[I 2021-05-07 15:58:37,024] Trial 2856 pruned.
[I 2021-05-07 15:58:37,304] Trial 2857 pruned.
[I 2021-05-07 15:58:38,120] Trial 2858 pruned.
[I 2021-05-07 15:58:38,289] Trial 2859 pruned.
[I 2021-05-07 15:58:38,763] Trial 2860 pruned.
[I 2021-05-07 15:58:39,204] Trial 2861 pruned.
[I 2021-05-07 15:58:40,029] Trial 2862 pruned.
[I 2021-05-07 15:58:42,330] Trial 2863 pruned.
[I 2021-05-07 15:58:42,591] Trial 2864 pruned.
[I 2021-05-07 15:58:42,724] Trial 2865 pruned.
[I 2021-05-07 15:58:43,195] Trial 2866 pruned.
[I 2021-05-07 15:58:43,421] Trial 2867 pruned.
[I 2021-05-07 15:58:43,861] Trial 2868 pruned.
[I 2021-05-07 15:58:44,352] Trial 2869 pruned.
[I 2021-05-07 15:58:44,842] Trial 2870 pruned.
[I 2021-05-07 15:58:45,724] Trial 2871 pruned.
[I 2021-05-07 15:58:45,989] Trial 2872 pruned.
[I 2021-05-07 15:58:46,465] Trial 2873 pruned.
[I 2021-05-07 15:58:46,635] Trial 2874 pruned.
[I 2021-05-07 15:58:47,446] Trial 2875 pruned.
[I 2021-05-07 15:58:47,889] Trial 2876 pruned.
[I 2021-05-07 15:58:48,375] Trial 2877 pruned.
[I 2021-05-07 15:58:48,821] Trial 2878 pruned.
[I 2021-05-07 15:58:49,103] Trial 2879 pruned.
[I 2021-05-07 15:58:49,593] Trial 2880 pruned.
[I 2021-05-07 15:58:49,768] Trial 2881 pruned.
[I 2021-05-07 15:58:50,588] Trial 2882 pruned.
[I 2021-05-07 15:58:51,032] Trial 2883 pruned.
[I 2021-05-07 15:58:51,482] Trial 2884 pruned.
[I 2021-05-07 15:58:51,956] Trial 2885 pruned.
[I 2021-05-07 15:58:52,272] Trial 2886 pruned.
[I 2021-05-07 15:58:52,759] Trial 2887 pruned.
[I 2021-05-07 15:58:52,926] Trial 2888 pruned.
[I 2021-05-07 15:58:53,379] Trial 2889 pruned.
[I 2021-05-07 15:58:53,499] Trial 2890 pruned.
[I 2021-05-07 15:58:53,983] Trial 2891 pruned.
[I 2021-05-07 15:58:55,259] Trial 2892 pruned.
[I 2021-05-07 15:58:55,696] Trial 2893 pruned.
[I 2021-05-07 15:58:55,976] Trial 2894 pruned.
[I 2021-05-07 15:58:56,419] Trial 2895 pruned.
[I 2021-05-07 15:58:56,921] Trial 2896 pruned.
[I 2021-05-07 15:58:57,087] Trial 2897 pruned.
[I 2021-05-07 15:58:57,926] Trial 2898 pruned.
[I 2021-05-07 15:58:58,641] Trial 2899 pruned.
[I 2021-05-07 15:58:59,082] Trial 2900 pruned.
[I 2021-05-07 15:58:59,369] Trial 2901 pruned.
[I 2021-05-07 15:59:00,195] Trial 2902 pruned.
[I 2021-05-07 15:59:00,373] Trial 2903 pruned.
[I 2021-05-07 15:59:00,815] Trial 2904 pruned.
[I 2021-05-07 15:59:01,293] Trial 2905 pruned.
[I 2021-05-07 15:59:02,119] Trial 2906 pruned.
[I 2021-05-07 15:59:02,561] Trial 2907 pruned.
[I 2021-05-07 15:59:02,849] Trial 2908 pruned.
[I 2021-05-07 15:59:03,292] Trial 2909 pruned.
[I 2021-05-07 15:59:03,470] Trial 2910 pruned.
[I 2021-05-07 15:59:04,022] Trial 2911 pruned.
[I 2021-05-07 15:59:04,463] Trial 2912 pruned.
[I 2021-05-07 15:59:04,938] Trial 2913 pruned.
[I 2021-05-07 15:59:05,413] Trial 2914 pruned.
[I 2021-05-07 15:59:05,671] Trial 2915 pruned.
[I 2021-05-07 15:59:06,160] Trial 2916 pruned.
[I 2021-05-07 15:59:06,325] Trial 2917 pruned.
[I 2021-05-07 15:59:06,778] Trial 2918 pruned.
[I 2021-05-07 15:59:07,266] Trial 2919 pruned.
[I 2021-05-07 15:59:07,431] Trial 2920 pruned.
[I 2021-05-07 15:59:07,909] Trial 2921 pruned.
[I 2021-05-07 15:59:08,352] Trial 2922 pruned.
[I 2021-05-07 15:59:08,615] Trial 2923 pruned.
[I 2021-05-07 15:59:09,054] Trial 2924 pruned.
[I 2021-05-07 15:59:09,223] Trial 2925 pruned.
[I 2021-05-07 15:59:09,717] Trial 2926 pruned.
[I 2021-05-07 15:59:10,527] Trial 2927 pruned.
[I 2021-05-07 15:59:11,025] Trial 2928 pruned.
[I 2021-05-07 15:59:11,499] Trial 2929 pruned.
[I 2021-05-07 15:59:11,758] Trial 2930 pruned.
[I 2021-05-07 15:59:12,201] Trial 2931 pruned.
[I 2021-05-07 15:59:12,372] Trial 2932 pruned.
[I 2021-05-07 15:59:13,244] Trial 2933 pruned.
[I 2021-05-07 15:59:13,717] Trial 2934 pruned.
[I 2021-05-07 15:59:14,173] Trial 2935 pruned.
[I 2021-05-07 15:59:14,660] Trial 2936 pruned.
[I 2021-05-07 15:59:14,920] Trial 2937 pruned.
[I 2021-05-07 15:59:15,371] Trial 2938 pruned.
[I 2021-05-07 15:59:15,537] Trial 2939 pruned.
[I 2021-05-07 15:59:15,990] Trial 2940 pruned.
[I 2021-05-07 15:59:16,464] Trial 2941 pruned.
[I 2021-05-07 15:59:17,176] Trial 2942 pruned.
[I 2021-05-07 15:59:18,292] Trial 2943 pruned.
[I 2021-05-07 15:59:18,559] Trial 2944 pruned.
[I 2021-05-07 15:59:19,012] Trial 2945 pruned.
[I 2021-05-07 15:59:19,194] Trial 2946 pruned.
[I 2021-05-07 15:59:19,634] Trial 2947 pruned.
[I 2021-05-07 15:59:20,087] Trial 2948 pruned.
[I 2021-05-07 15:59:20,218] Trial 2949 pruned.
[I 2021-05-07 15:59:20,721] Trial 2950 pruned.
[I 2021-05-07 15:59:21,162] Trial 2951 pruned.
[I 2021-05-07 15:59:21,802] Trial 2952 pruned.
[I 2021-05-07 15:59:22,708] Trial 2953 pruned.
[I 2021-05-07 15:59:22,891] Trial 2954 pruned.
[I 2021-05-07 15:59:23,342] Trial 2955 pruned.
[I 2021-05-07 15:59:23,785] Trial 2956 pruned.
[I 2021-05-07 15:59:24,227] Trial 2957 pruned.
[I 2021-05-07 15:59:24,717] Trial 2958 pruned.
[I 2021-05-07 15:59:24,976] Trial 2959 pruned.
[I 2021-05-07 15:59:25,456] Trial 2960 pruned.
[I 2021-05-07 15:59:25,639] Trial 2961 pruned.
[I 2021-05-07 15:59:26,096] Trial 2962 pruned.
[I 2021-05-07 15:59:27,096] Trial 2963 pruned.
[I 2021-05-07 15:59:27,549] Trial 2964 pruned.
[I 2021-05-07 15:59:28,468] Trial 2965 pruned.
[I 2021-05-07 15:59:28,949] Trial 2966 pruned.
[I 2021-05-07 15:59:29,405] Trial 2967 pruned.
[I 2021-05-07 15:59:29,571] Trial 2968 pruned.
[I 2021-05-07 15:59:30,748] Trial 2969 pruned.
[I 2021-05-07 15:59:31,249] Trial 2970 pruned.
[I 2021-05-07 15:59:33,741] Trial 2971 pruned.
[I 2021-05-07 15:59:34,221] Trial 2972 pruned.
[I 2021-05-07 15:59:34,734] Trial 2973 pruned.
[I 2021-05-07 15:59:35,200] Trial 2974 pruned.
[I 2021-05-07 15:59:35,398] Trial 2975 pruned.
[I 2021-05-07 15:59:39,168] Trial 2976 pruned.
[I 2021-05-07 15:59:40,026] Trial 2977 pruned.
[I 2021-05-07 15:59:40,154] Trial 2978 pruned.
[I 2021-05-07 15:59:40,646] Trial 2979 pruned.
[I 2021-05-07 15:59:41,115] Trial 2980 pruned.
[I 2021-05-07 15:59:41,381] Trial 2981 pruned.
[I 2021-05-07 15:59:41,849] Trial 2982 pruned.
[I 2021-05-07 15:59:42,039] Trial 2983 pruned.
[I 2021-05-07 15:59:42,482] Trial 2984 pruned.
[I 2021-05-07 15:59:42,957] Trial 2985 pruned.
[I 2021-05-07 15:59:43,405] Trial 2986 pruned.
[I 2021-05-07 15:59:43,860] Trial 2987 pruned.
[I 2021-05-07 15:59:44,137] Trial 2988 pruned.
[I 2021-05-07 15:59:44,639] Trial 2989 pruned.
[I 2021-05-07 15:59:44,808] Trial 2990 pruned.
[I 2021-05-07 15:59:45,301] Trial 2991 pruned.
[I 2021-05-07 15:59:45,772] Trial 2992 pruned.
[I 2021-05-07 15:59:46,220] Trial 2993 pruned.
[I 2021-05-07 15:59:46,672] Trial 2994 pruned.
[I 2021-05-07 15:59:46,929] Trial 2995 pruned.
[I 2021-05-07 15:59:47,373] Trial 2996 pruned.
[I 2021-05-07 15:59:47,609] Trial 2997 pruned.
[I 2021-05-07 15:59:48,195] Trial 2998 pruned.
[I 2021-05-07 15:59:48,686] Trial 2999 pruned.
[I 2021-05-07 15:59:49,162] Trial 3000 pruned.
[I 2021-05-07 15:59:49,602] Trial 3001 pruned.
[I 2021-05-07 15:59:49,868] Trial 3002 pruned.
[I 2021-05-07 15:59:50,323] Trial 3003 pruned.
[I 2021-05-07 15:59:50,494] Trial 3004 pruned.
[I 2021-05-07 15:59:51,312] Trial 3005 pruned.
[I 2021-05-07 15:59:51,788] Trial 3006 pruned.
[I 2021-05-07 15:59:52,282] Trial 3007 pruned.
[I 2021-05-07 15:59:52,459] Trial 3008 pruned.
[I 2021-05-07 15:59:53,309] Trial 3009 pruned.
[I 2021-05-07 15:59:53,567] Trial 3010 pruned.
[I 2021-05-07 15:59:54,048] Trial 3011 pruned.
[I 2021-05-07 15:59:54,230] Trial 3012 pruned.
[I 2021-05-07 15:59:54,727] Trial 3013 pruned.
[I 2021-05-07 15:59:55,169] Trial 3014 pruned.
[I 2021-05-07 15:59:55,985] Trial 3015 pruned.
[I 2021-05-07 15:59:56,440] Trial 3016 pruned.
[I 2021-05-07 15:59:56,719] Trial 3017 pruned.
[I 2021-05-07 15:59:57,164] Trial 3018 pruned.
[I 2021-05-07 15:59:57,354] Trial 3019 pruned.
[I 2021-05-07 15:59:57,797] Trial 3020 pruned.
[I 2021-05-07 15:59:58,281] Trial 3021 pruned.
[I 2021-05-07 15:59:58,728] Trial 3022 pruned.
[I 2021-05-07 15:59:59,224] Trial 3023 pruned.
[I 2021-05-07 15:59:59,555] Trial 3024 pruned.
[I 2021-05-07 16:00:00,014] Trial 3025 pruned.
[I 2021-05-07 16:00:00,205] Trial 3026 pruned.
[I 2021-05-07 16:00:01,148] Trial 3027 pruned.
[I 2021-05-07 16:00:01,656] Trial 3028 pruned.
[I 2021-05-07 16:00:02,530] Trial 3029 pruned.
[I 2021-05-07 16:00:03,420] Trial 3030 pruned.
[I 2021-05-07 16:00:03,687] Trial 3031 pruned.
[I 2021-05-07 16:00:04,131] Trial 3032 pruned.
[I 2021-05-07 16:00:04,323] Trial 3033 pruned.
[I 2021-05-07 16:00:04,796] Trial 3034 pruned.
[I 2021-05-07 16:00:05,258] Trial 3035 pruned.
[I 2021-05-07 16:00:05,428] Trial 3036 pruned.
[I 2021-05-07 16:00:05,918] Trial 3037 pruned.
[I 2021-05-07 16:00:06,855] Trial 3038 pruned.
[I 2021-05-07 16:00:07,149] Trial 3039 pruned.
[I 2021-05-07 16:00:07,592] Trial 3040 pruned.
[I 2021-05-07 16:00:07,778] Trial 3041 pruned.
[I 2021-05-07 16:00:08,237] Trial 3042 pruned.
[I 2021-05-07 16:00:08,693] Trial 3043 pruned.
[I 2021-05-07 16:00:09,589] Trial 3044 pruned.
[I 2021-05-07 16:00:10,392] Trial 3045 pruned.
[I 2021-05-07 16:00:10,657] Trial 3046 pruned.
[I 2021-05-07 16:00:11,101] Trial 3047 pruned.
[I 2021-05-07 16:00:11,282] Trial 3048 pruned.
[I 2021-05-07 16:00:12,476] Trial 3049 pruned.
[I 2021-05-07 16:00:12,923] Trial 3050 pruned.
[I 2021-05-07 16:00:13,415] Trial 3051 pruned.
[I 2021-05-07 16:00:13,859] Trial 3052 pruned.
[I 2021-05-07 16:00:14,151] Trial 3053 pruned.
[I 2021-05-07 16:00:14,599] Trial 3054 pruned.
[I 2021-05-07 16:00:14,783] Trial 3055 pruned.
[I 2021-05-07 16:00:15,237] Trial 3056 pruned.
[I 2021-05-07 16:00:15,679] Trial 3057 pruned.
[I 2021-05-07 16:00:16,178] Trial 3058 pruned.
[I 2021-05-07 16:00:16,675] Trial 3059 pruned.
[I 2021-05-07 16:00:16,940] Trial 3060 pruned.
[I 2021-05-07 16:00:17,452] Trial 3061 pruned.
[I 2021-05-07 16:00:17,622] Trial 3062 pruned.
[I 2021-05-07 16:00:18,463] Trial 3063 pruned.
[I 2021-05-07 16:00:18,907] Trial 3064 pruned.
[I 2021-05-07 16:00:19,096] Trial 3065 pruned.
[I 2021-05-07 16:00:20,177] Trial 3066 pruned.
[I 2021-05-07 16:00:20,624] Trial 3067 pruned.
[I 2021-05-07 16:00:21,087] Trial 3068 pruned.
[I 2021-05-07 16:00:21,520] Trial 3069 pruned.
[I 2021-05-07 16:00:21,757] Trial 3070 pruned.
[I 2021-05-07 16:00:24,333] Trial 3071 pruned.
[I 2021-05-07 16:00:24,836] Trial 3072 pruned.
[I 2021-05-07 16:00:25,325] Trial 3073 pruned.
[I 2021-05-07 16:00:26,174] Trial 3074 pruned.
[I 2021-05-07 16:00:26,455] Trial 3075 pruned.
[I 2021-05-07 16:00:26,952] Trial 3076 pruned.
[I 2021-05-07 16:00:27,126] Trial 3077 pruned.
[I 2021-05-07 16:00:27,978] Trial 3078 pruned.
[I 2021-05-07 16:00:28,463] Trial 3079 pruned.
[I 2021-05-07 16:00:28,940] Trial 3080 pruned.
[I 2021-05-07 16:00:29,489] Trial 3081 pruned.
[I 2021-05-07 16:00:29,749] Trial 3082 pruned.
[I 2021-05-07 16:00:30,252] Trial 3083 pruned.
[I 2021-05-07 16:00:30,548] Trial 3084 pruned.
[I 2021-05-07 16:00:31,392] Trial 3085 pruned.
[I 2021-05-07 16:00:31,881] Trial 3086 pruned.
[I 2021-05-07 16:00:32,355] Trial 3087 pruned.
[I 2021-05-07 16:00:32,863] Trial 3088 pruned.
[I 2021-05-07 16:00:33,130] Trial 3089 pruned.
[I 2021-05-07 16:00:33,652] Trial 3090 pruned.
[I 2021-05-07 16:00:33,837] Trial 3091 pruned.
[I 2021-05-07 16:00:34,284] Trial 3092 pruned.
[I 2021-05-07 16:00:34,739] Trial 3093 pruned.
[I 2021-05-07 16:00:34,865] Trial 3094 pruned.
[I 2021-05-07 16:00:35,332] Trial 3095 pruned.
[I 2021-05-07 16:00:36,190] Trial 3096 pruned.
[I 2021-05-07 16:00:36,449] Trial 3097 pruned.
[I 2021-05-07 16:00:37,275] Trial 3098 pruned.
[I 2021-05-07 16:00:37,478] Trial 3099 pruned.
[I 2021-05-07 16:00:37,956] Trial 3100 pruned.
[I 2021-05-07 16:00:38,778] Trial 3101 pruned.
[I 2021-05-07 16:00:39,272] Trial 3102 pruned.
[I 2021-05-07 16:00:40,074] Trial 3103 pruned.
[I 2021-05-07 16:00:40,350] Trial 3104 pruned.
[I 2021-05-07 16:00:41,205] Trial 3105 pruned.
[I 2021-05-07 16:00:41,385] Trial 3106 pruned.
[I 2021-05-07 16:00:41,864] Trial 3107 pruned.
[I 2021-05-07 16:00:42,749] Trial 3108 pruned.
[I 2021-05-07 16:00:43,186] Trial 3109 pruned.
[I 2021-05-07 16:00:43,728] Trial 3110 pruned.
[I 2021-05-07 16:00:43,997] Trial 3111 pruned.
[I 2021-05-07 16:00:44,944] Trial 3112 pruned.
[I 2021-05-07 16:00:45,148] Trial 3113 pruned.
[I 2021-05-07 16:00:45,690] Trial 3114 pruned.
[I 2021-05-07 16:00:46,184] Trial 3115 pruned.
[I 2021-05-07 16:00:47,170] Trial 3116 pruned.
[I 2021-05-07 16:00:47,701] Trial 3117 pruned.
[I 2021-05-07 16:00:48,077] Trial 3118 pruned.
[I 2021-05-07 16:00:48,557] Trial 3119 pruned.
[I 2021-05-07 16:00:48,754] Trial 3120 pruned.
[I 2021-05-07 16:00:49,227] Trial 3121 pruned.
[I 2021-05-07 16:00:49,702] Trial 3122 pruned.
[I 2021-05-07 16:00:50,162] Trial 3123 pruned.
[I 2021-05-07 16:00:50,305] Trial 3124 pruned.
[I 2021-05-07 16:00:50,807] Trial 3125 pruned.
[I 2021-05-07 16:00:51,075] Trial 3126 pruned.
[I 2021-05-07 16:00:51,562] Trial 3127 pruned.
[I 2021-05-07 16:00:51,753] Trial 3128 pruned.
[I 2021-05-07 16:00:52,622] Trial 3129 pruned.
[I 2021-05-07 16:00:53,081] Trial 3130 pruned.
[I 2021-05-07 16:00:53,539] Trial 3131 pruned.
[I 2021-05-07 16:00:54,481] Trial 3132 pruned.
[I 2021-05-07 16:00:54,755] Trial 3133 pruned.
[I 2021-05-07 16:00:55,256] Trial 3134 pruned.
[I 2021-05-07 16:00:55,441] Trial 3135 pruned.
[I 2021-05-07 16:00:55,917] Trial 3136 pruned.
[I 2021-05-07 16:00:56,387] Trial 3137 pruned.
[I 2021-05-07 16:00:56,859] Trial 3138 pruned.
[I 2021-05-07 16:00:57,820] Trial 3139 pruned.
[I 2021-05-07 16:00:58,096] Trial 3140 pruned.
[I 2021-05-07 16:00:58,590] Trial 3141 pruned.
[I 2021-05-07 16:00:58,761] Trial 3142 pruned.
[I 2021-05-07 16:00:59,319] Trial 3143 pruned.
[I 2021-05-07 16:01:00,053] Trial 3144 pruned.
[I 2021-05-07 16:01:00,485] Trial 3145 pruned.
[I 2021-05-07 16:01:00,933] Trial 3146 pruned.
[I 2021-05-07 16:01:01,183] Trial 3147 pruned.
[I 2021-05-07 16:01:01,811] Trial 3148 pruned.
[I 2021-05-07 16:01:01,989] Trial 3149 pruned.
[I 2021-05-07 16:01:02,453] Trial 3150 pruned.
[I 2021-05-07 16:01:02,931] Trial 3151 pruned.
[I 2021-05-07 16:01:03,731] Trial 3152 pruned.
[I 2021-05-07 16:01:03,862] Trial 3153 pruned.
[I 2021-05-07 16:01:04,316] Trial 3154 pruned.
[I 2021-05-07 16:01:04,573] Trial 3155 pruned.
[I 2021-05-07 16:01:05,399] Trial 3156 pruned.
[I 2021-05-07 16:01:05,586] Trial 3157 pruned.
[I 2021-05-07 16:01:06,445] Trial 3158 pruned.
[I 2021-05-07 16:01:06,892] Trial 3159 pruned.
[I 2021-05-07 16:01:07,328] Trial 3160 pruned.
[I 2021-05-07 16:01:08,230] Trial 3161 pruned.
[I 2021-05-07 16:01:08,486] Trial 3162 pruned.
[I 2021-05-07 16:01:08,969] Trial 3163 pruned.
[I 2021-05-07 16:01:09,152] Trial 3164 pruned.
[I 2021-05-07 16:01:09,598] Trial 3165 pruned.
[I 2021-05-07 16:01:10,041] Trial 3166 pruned.
[I 2021-05-07 16:01:10,837] Trial 3167 pruned.
[I 2021-05-07 16:01:11,276] Trial 3168 pruned.
[I 2021-05-07 16:01:11,539] Trial 3169 pruned.
[I 2021-05-07 16:01:11,998] Trial 3170 pruned.
[I 2021-05-07 16:01:12,179] Trial 3171 pruned.
[I 2021-05-07 16:01:13,340] Trial 3172 pruned.
[I 2021-05-07 16:01:13,827] Trial 3173 pruned.
[I 2021-05-07 16:01:16,372] Trial 3174 pruned.
[I 2021-05-07 16:01:16,916] Trial 3175 pruned.
[I 2021-05-07 16:01:17,242] Trial 3176 pruned.
[I 2021-05-07 16:01:17,726] Trial 3177 pruned.
[I 2021-05-07 16:01:17,921] Trial 3178 pruned.
[I 2021-05-07 16:01:18,391] Trial 3179 pruned.
[I 2021-05-07 16:01:18,891] Trial 3180 pruned.
[I 2021-05-07 16:01:19,377] Trial 3181 pruned.
[I 2021-05-07 16:01:19,498] Trial 3182 pruned.
[I 2021-05-07 16:01:20,010] Trial 3183 pruned.
[I 2021-05-07 16:01:20,473] Trial 3184 pruned.
[I 2021-05-07 16:01:20,922] Trial 3185 pruned.
[I 2021-05-07 16:01:21,104] Trial 3186 pruned.
[I 2021-05-07 16:01:21,546] Trial 3187 pruned.
[I 2021-05-07 16:01:22,389] Trial 3188 pruned.
[I 2021-05-07 16:01:23,293] Trial 3189 pruned.
[I 2021-05-07 16:01:24,470] Trial 3190 pruned.
[I 2021-05-07 16:01:24,962] Trial 3191 pruned.
[I 2021-05-07 16:01:25,431] Trial 3192 pruned.
[I 2021-05-07 16:01:25,599] Trial 3193 pruned.
[I 2021-05-07 16:01:26,034] Trial 3194 pruned.
[I 2021-05-07 16:01:26,473] Trial 3195 pruned.
[I 2021-05-07 16:01:27,285] Trial 3196 pruned.
[I 2021-05-07 16:01:27,748] Trial 3197 pruned.
[I 2021-05-07 16:01:28,029] Trial 3198 pruned.
[I 2021-05-07 16:01:28,833] Trial 3199 pruned.
[I 2021-05-07 16:01:29,002] Trial 3200 pruned.
[I 2021-05-07 16:01:29,537] Trial 3201 pruned.
[I 2021-05-07 16:01:30,384] Trial 3202 pruned.
[I 2021-05-07 16:01:30,822] Trial 3203 pruned.
[I 2021-05-07 16:01:31,291] Trial 3204 pruned.
[I 2021-05-07 16:01:31,576] Trial 3205 pruned.
[I 2021-05-07 16:01:32,001] Trial 3206 pruned.
[I 2021-05-07 16:01:32,167] Trial 3207 pruned.
[I 2021-05-07 16:01:32,613] Trial 3208 pruned.
[I 2021-05-07 16:01:33,891] Trial 3209 pruned.
[I 2021-05-07 16:01:34,015] Trial 3210 pruned.
[I 2021-05-07 16:01:34,455] Trial 3211 pruned.
[I 2021-05-07 16:01:34,876] Trial 3212 pruned.
[I 2021-05-07 16:01:35,159] Trial 3213 pruned.
[I 2021-05-07 16:01:35,958] Trial 3214 pruned.
[I 2021-05-07 16:01:36,136] Trial 3215 pruned.
[I 2021-05-07 16:01:36,572] Trial 3216 pruned.
[I 2021-05-07 16:01:37,052] Trial 3217 pruned.
[I 2021-05-07 16:01:37,497] Trial 3218 pruned.
[I 2021-05-07 16:01:37,933] Trial 3219 pruned.
[I 2021-05-07 16:01:38,197] Trial 3220 pruned.
[I 2021-05-07 16:01:38,670] Trial 3221 pruned.
[I 2021-05-07 16:01:38,836] Trial 3222 pruned.
[I 2021-05-07 16:01:39,314] Trial 3223 pruned.
[I 2021-05-07 16:01:40,127] Trial 3224 pruned.
[I 2021-05-07 16:01:41,010] Trial 3225 pruned.
[I 2021-05-07 16:01:41,446] Trial 3226 pruned.
[I 2021-05-07 16:01:42,364] Trial 3227 pruned.
[I 2021-05-07 16:01:42,788] Trial 3228 pruned.
[I 2021-05-07 16:01:42,955] Trial 3229 pruned.
[I 2021-05-07 16:01:43,400] Trial 3230 pruned.
[I 2021-05-07 16:01:43,836] Trial 3231 pruned.
[I 2021-05-07 16:01:44,313] Trial 3232 pruned.
[I 2021-05-07 16:01:44,794] Trial 3233 pruned.
[I 2021-05-07 16:01:45,251] Trial 3234 pruned.
[I 2021-05-07 16:01:45,697] Trial 3235 pruned.
[I 2021-05-07 16:01:45,873] Trial 3236 pruned.
[I 2021-05-07 16:01:46,348] Trial 3237 pruned.
[I 2021-05-07 16:01:46,916] Trial 3238 pruned.
[I 2021-05-07 16:01:47,045] Trial 3239 pruned.
[I 2021-05-07 16:01:47,492] Trial 3240 pruned.
[I 2021-05-07 16:01:48,291] Trial 3241 pruned.
[I 2021-05-07 16:01:48,550] Trial 3242 pruned.
[I 2021-05-07 16:01:48,985] Trial 3243 pruned.
[I 2021-05-07 16:01:49,170] Trial 3244 pruned.
[I 2021-05-07 16:01:49,991] Trial 3245 pruned.
[I 2021-05-07 16:01:50,429] Trial 3246 pruned.
[I 2021-05-07 16:01:50,913] Trial 3247 pruned.
[I 2021-05-07 16:01:51,713] Trial 3248 pruned.
[I 2021-05-07 16:01:51,982] Trial 3249 pruned.
[I 2021-05-07 16:01:52,514] Trial 3250 pruned.
[I 2021-05-07 16:01:52,701] Trial 3251 pruned.
[I 2021-05-07 16:01:53,189] Trial 3252 pruned.
[I 2021-05-07 16:01:53,625] Trial 3253 pruned.
[I 2021-05-07 16:01:54,505] Trial 3254 pruned.
[I 2021-05-07 16:01:55,663] Trial 3255 pruned.
[I 2021-05-07 16:01:55,912] Trial 3256 pruned.
[I 2021-05-07 16:01:56,735] Trial 3257 pruned.
[I 2021-05-07 16:01:56,915] Trial 3258 pruned.
[I 2021-05-07 16:01:57,628] Trial 3259 pruned.
[I 2021-05-07 16:01:58,496] Trial 3260 pruned.
[I 2021-05-07 16:01:58,980] Trial 3261 pruned.
[I 2021-05-07 16:01:59,456] Trial 3262 pruned.
[I 2021-05-07 16:01:59,715] Trial 3263 pruned.
[I 2021-05-07 16:02:00,675] Trial 3264 pruned.
[I 2021-05-07 16:02:00,870] Trial 3265 pruned.
[I 2021-05-07 16:02:01,325] Trial 3266 pruned.
[I 2021-05-07 16:02:01,791] Trial 3267 pruned.
[I 2021-05-07 16:02:02,233] Trial 3268 pruned.
[I 2021-05-07 16:02:02,372] Trial 3269 pruned.
[I 2021-05-07 16:02:03,187] Trial 3270 pruned.
[I 2021-05-07 16:02:03,456] Trial 3271 pruned.
[I 2021-05-07 16:02:04,109] Trial 3272 pruned.
[I 2021-05-07 16:02:04,293] Trial 3273 pruned.
[I 2021-05-07 16:02:04,771] Trial 3274 pruned.
[I 2021-05-07 16:02:05,238] Trial 3275 pruned.
[I 2021-05-07 16:02:05,722] Trial 3276 pruned.
[I 2021-05-07 16:02:06,186] Trial 3277 pruned.
[I 2021-05-07 16:02:06,453] Trial 3278 pruned.
[I 2021-05-07 16:02:06,936] Trial 3279 pruned.
[I 2021-05-07 16:02:07,110] Trial 3280 pruned.
[I 2021-05-07 16:02:07,609] Trial 3281 pruned.
[I 2021-05-07 16:02:08,453] Trial 3282 pruned.
[I 2021-05-07 16:02:09,653] Trial 3283 pruned.
[I 2021-05-07 16:02:10,215] Trial 3284 pruned.
[I 2021-05-07 16:02:10,486] Trial 3285 pruned.
[I 2021-05-07 16:02:10,977] Trial 3286 pruned.
[I 2021-05-07 16:02:11,167] Trial 3287 pruned.
[I 2021-05-07 16:02:11,996] Trial 3288 pruned.
[I 2021-05-07 16:02:12,450] Trial 3289 pruned.
[I 2021-05-07 16:02:12,900] Trial 3290 pruned.
[I 2021-05-07 16:02:13,394] Trial 3291 pruned.
[I 2021-05-07 16:02:13,654] Trial 3292 pruned.
[I 2021-05-07 16:02:14,136] Trial 3293 pruned.
[I 2021-05-07 16:02:14,419] Trial 3294 pruned.
[I 2021-05-07 16:02:14,914] Trial 3295 pruned.
[I 2021-05-07 16:02:15,381] Trial 3296 pruned.
[I 2021-05-07 16:02:15,524] Trial 3297 pruned.
[I 2021-05-07 16:02:16,361] Trial 3298 pruned.
[I 2021-05-07 16:02:16,857] Trial 3299 pruned.
[I 2021-05-07 16:02:17,130] Trial 3300 pruned.
[I 2021-05-07 16:02:17,787] Trial 3301 pruned.
[I 2021-05-07 16:02:17,984] Trial 3302 pruned.
[I 2021-05-07 16:02:18,436] Trial 3303 pruned.
[I 2021-05-07 16:02:18,910] Trial 3304 pruned.
[I 2021-05-07 16:02:19,379] Trial 3305 pruned.
[I 2021-05-07 16:02:19,827] Trial 3306 pruned.
[I 2021-05-07 16:02:20,114] Trial 3307 pruned.
[I 2021-05-07 16:02:21,021] Trial 3308 pruned.
[I 2021-05-07 16:02:21,201] Trial 3309 pruned.
[I 2021-05-07 16:02:21,640] Trial 3310 pruned.
[I 2021-05-07 16:02:22,471] Trial 3311 pruned.
[I 2021-05-07 16:02:22,917] Trial 3312 pruned.
[I 2021-05-07 16:02:23,409] Trial 3313 pruned.
[I 2021-05-07 16:02:23,673] Trial 3314 pruned.
[I 2021-05-07 16:02:24,142] Trial 3315 pruned.
[I 2021-05-07 16:02:24,318] Trial 3316 pruned.
[I 2021-05-07 16:02:24,893] Trial 3317 pruned.
[I 2021-05-07 16:02:25,381] Trial 3318 pruned.
[I 2021-05-07 16:02:25,867] Trial 3319 pruned.
[I 2021-05-07 16:02:26,302] Trial 3320 pruned.
[I 2021-05-07 16:02:26,760] Trial 3321 pruned.
[I 2021-05-07 16:02:27,208] Trial 3322 pruned.
[I 2021-05-07 16:02:27,386] Trial 3323 pruned.
[I 2021-05-07 16:02:27,825] Trial 3324 pruned.
[I 2021-05-07 16:02:28,638] Trial 3325 pruned.
[I 2021-05-07 16:02:29,532] Trial 3326 pruned.
[I 2021-05-07 16:02:29,972] Trial 3327 pruned.
[I 2021-05-07 16:02:30,262] Trial 3328 pruned.
[I 2021-05-07 16:02:30,393] Trial 3329 pruned.
[I 2021-05-07 16:02:30,828] Trial 3330 pruned.
[I 2021-05-07 16:02:31,005] Trial 3331 pruned.
[I 2021-05-07 16:02:31,450] Trial 3332 pruned.
[I 2021-05-07 16:02:31,927] Trial 3333 pruned.
[I 2021-05-07 16:02:32,403] Trial 3334 pruned.
[I 2021-05-07 16:02:32,858] Trial 3335 pruned.
[I 2021-05-07 16:02:33,145] Trial 3336 pruned.
[I 2021-05-07 16:02:33,587] Trial 3337 pruned.
[I 2021-05-07 16:02:33,766] Trial 3338 pruned.
[I 2021-05-07 16:02:34,237] Trial 3339 pruned.
[I 2021-05-07 16:02:34,780] Trial 3340 pruned.
[I 2021-05-07 16:02:35,273] Trial 3341 pruned.
[I 2021-05-07 16:02:35,719] Trial 3342 pruned.
[I 2021-05-07 16:02:35,987] Trial 3343 pruned.
[I 2021-05-07 16:02:36,790] Trial 3344 pruned.
[I 2021-05-07 16:02:36,974] Trial 3345 pruned.
[I 2021-05-07 16:02:37,468] Trial 3346 pruned.
[I 2021-05-07 16:02:38,365] Trial 3347 pruned.
[I 2021-05-07 16:02:38,818] Trial 3348 pruned.
[I 2021-05-07 16:02:39,263] Trial 3349 pruned.
[I 2021-05-07 16:02:39,715] Trial 3350 pruned.
[I 2021-05-07 16:02:40,195] Trial 3351 pruned.
[I 2021-05-07 16:02:40,379] Trial 3352 pruned.
[I 2021-05-07 16:02:40,836] Trial 3353 pruned.
[I 2021-05-07 16:02:41,311] Trial 3354 pruned.
[I 2021-05-07 16:02:41,497] Trial 3355 pruned.
[I 2021-05-07 16:02:42,317] Trial 3356 pruned.
[I 2021-05-07 16:02:42,811] Trial 3357 pruned.
[I 2021-05-07 16:02:43,087] Trial 3358 pruned.
[I 2021-05-07 16:02:43,893] Trial 3359 pruned.
[I 2021-05-07 16:02:44,617] Trial 3360 pruned.
[I 2021-05-07 16:02:44,792] Trial 3361 pruned.
[I 2021-05-07 16:02:46,756] Trial 3362 pruned.
[I 2021-05-07 16:02:47,230] Trial 3363 pruned.
[I 2021-05-07 16:02:47,662] Trial 3364 pruned.
[I 2021-05-07 16:02:47,930] Trial 3365 pruned.
[I 2021-05-07 16:02:48,379] Trial 3366 pruned.
[I 2021-05-07 16:02:48,570] Trial 3367 pruned.
[I 2021-05-07 16:02:49,048] Trial 3368 pruned.
[I 2021-05-07 16:02:49,485] Trial 3369 pruned.
[I 2021-05-07 16:02:49,935] Trial 3370 pruned.
[I 2021-05-07 16:02:50,379] Trial 3371 pruned.
[I 2021-05-07 16:02:50,658] Trial 3372 pruned.
[I 2021-05-07 16:02:51,102] Trial 3373 pruned.
[I 2021-05-07 16:02:51,293] Trial 3374 pruned.
[I 2021-05-07 16:02:51,748] Trial 3375 pruned.
[I 2021-05-07 16:02:52,825] Trial 3376 pruned.
[I 2021-05-07 16:02:53,312] Trial 3377 pruned.
[I 2021-05-07 16:02:53,753] Trial 3378 pruned.
[I 2021-05-07 16:02:54,042] Trial 3379 pruned.
[I 2021-05-07 16:02:54,496] Trial 3380 pruned.
[I 2021-05-07 16:02:54,672] Trial 3381 pruned.
[I 2021-05-07 16:02:55,933] Trial 3382 pruned.
[I 2021-05-07 16:02:56,413] Trial 3383 pruned.
[I 2021-05-07 16:02:56,856] Trial 3384 pruned.
[I 2021-05-07 16:02:56,989] Trial 3385 pruned.
[I 2021-05-07 16:02:58,123] Trial 3386 pruned.
[I 2021-05-07 16:02:58,414] Trial 3387 pruned.
[I 2021-05-07 16:02:58,854] Trial 3388 pruned.
[I 2021-05-07 16:02:59,042] Trial 3389 pruned.
[I 2021-05-07 16:02:59,484] Trial 3390 pruned.
[I 2021-05-07 16:02:59,914] Trial 3391 pruned.
[I 2021-05-07 16:03:00,418] Trial 3392 pruned.
[I 2021-05-07 16:03:00,893] Trial 3393 pruned.
[I 2021-05-07 16:03:01,185] Trial 3394 pruned.
[I 2021-05-07 16:03:01,725] Trial 3395 pruned.
[I 2021-05-07 16:03:01,903] Trial 3396 pruned.
[I 2021-05-07 16:03:02,732] Trial 3397 pruned.
[I 2021-05-07 16:03:03,186] Trial 3398 pruned.
[I 2021-05-07 16:03:04,015] Trial 3399 pruned.
[I 2021-05-07 16:03:04,458] Trial 3400 pruned.
[I 2021-05-07 16:03:04,742] Trial 3401 pruned.
[I 2021-05-07 16:03:05,200] Trial 3402 pruned.
[I 2021-05-07 16:03:05,386] Trial 3403 pruned.
[I 2021-05-07 16:03:05,840] Trial 3404 pruned.
[I 2021-05-07 16:03:06,284] Trial 3405 pruned.
[I 2021-05-07 16:03:06,758] Trial 3406 pruned.
[I 2021-05-07 16:03:07,209] Trial 3407 pruned.
[I 2021-05-07 16:03:07,473] Trial 3408 pruned.
[I 2021-05-07 16:03:07,947] Trial 3409 pruned.
[I 2021-05-07 16:03:08,140] Trial 3410 pruned.
[I 2021-05-07 16:03:08,623] Trial 3411 pruned.
[I 2021-05-07 16:03:09,356] Trial 3412 pruned.
[I 2021-05-07 16:03:09,799] Trial 3413 pruned.
[I 2021-05-07 16:03:10,286] Trial 3414 pruned.
[I 2021-05-07 16:03:10,546] Trial 3415 pruned.
[I 2021-05-07 16:03:10,678] Trial 3416 pruned.
[I 2021-05-07 16:03:11,171] Trial 3417 pruned.
[I 2021-05-07 16:03:11,347] Trial 3418 pruned.
[I 2021-05-07 16:03:11,793] Trial 3419 pruned.
[I 2021-05-07 16:03:12,239] Trial 3420 pruned.
[I 2021-05-07 16:03:12,710] Trial 3421 pruned.
[I 2021-05-07 16:03:13,193] Trial 3422 pruned.
[I 2021-05-07 16:03:13,826] Trial 3423 pruned.
[I 2021-05-07 16:03:14,721] Trial 3424 pruned.
[I 2021-05-07 16:03:14,909] Trial 3425 pruned.
[I 2021-05-07 16:03:15,753] Trial 3426 pruned.
[I 2021-05-07 16:03:16,388] Trial 3427 pruned.
[I 2021-05-07 16:03:18,284] Trial 3428 pruned.
[I 2021-05-07 16:03:18,775] Trial 3429 pruned.
[I 2021-05-07 16:03:19,041] Trial 3430 pruned.
[I 2021-05-07 16:03:19,497] Trial 3431 pruned.
[I 2021-05-07 16:03:19,691] Trial 3432 pruned.
[I 2021-05-07 16:03:20,495] Trial 3433 pruned.
[I 2021-05-07 16:03:20,957] Trial 3434 pruned.
[I 2021-05-07 16:03:21,518] Trial 3435 pruned.
[I 2021-05-07 16:03:22,017] Trial 3436 pruned.
[I 2021-05-07 16:03:22,281] Trial 3437 pruned.
[I 2021-05-07 16:03:22,759] Trial 3438 pruned.
[I 2021-05-07 16:03:22,939] Trial 3439 pruned.
[I 2021-05-07 16:03:23,386] Trial 3440 pruned.
[I 2021-05-07 16:03:23,886] Trial 3441 pruned.
[I 2021-05-07 16:03:24,790] Trial 3442 pruned.
[I 2021-05-07 16:03:25,247] Trial 3443 pruned.
[I 2021-05-07 16:03:25,514] Trial 3444 pruned.
[I 2021-05-07 16:03:25,645] Trial 3445 pruned.
[I 2021-05-07 16:03:26,126] Trial 3446 pruned.
[I 2021-05-07 16:03:26,301] Trial 3447 pruned.
[I 2021-05-07 16:03:26,779] Trial 3448 pruned.
[I 2021-05-07 16:03:27,262] Trial 3449 pruned.
[I 2021-05-07 16:03:28,070] Trial 3450 pruned.
[I 2021-05-07 16:03:28,538] Trial 3451 pruned.
[I 2021-05-07 16:03:28,819] Trial 3452 pruned.
[I 2021-05-07 16:03:29,275] Trial 3453 pruned.
[I 2021-05-07 16:03:29,452] Trial 3454 pruned.
[I 2021-05-07 16:03:30,264] Trial 3455 pruned.
[I 2021-05-07 16:03:31,092] Trial 3456 pruned.
[I 2021-05-07 16:03:31,581] Trial 3457 pruned.
[I 2021-05-07 16:03:32,068] Trial 3458 pruned.
[I 2021-05-07 16:03:32,484] Trial 3459 pruned.
[I 2021-05-07 16:03:32,966] Trial 3460 pruned.
[I 2021-05-07 16:03:33,144] Trial 3461 pruned.
[I 2021-05-07 16:03:33,624] Trial 3462 pruned.
[I 2021-05-07 16:03:34,074] Trial 3463 pruned.
[I 2021-05-07 16:03:34,520] Trial 3464 pruned.
[I 2021-05-07 16:03:35,110] Trial 3465 pruned.
[I 2021-05-07 16:03:35,395] Trial 3466 pruned.
[I 2021-05-07 16:03:36,199] Trial 3467 pruned.
[I 2021-05-07 16:03:36,380] Trial 3468 pruned.
[I 2021-05-07 16:03:36,826] Trial 3469 pruned.
[I 2021-05-07 16:03:37,331] Trial 3470 pruned.
[I 2021-05-07 16:03:38,216] Trial 3471 pruned.
[I 2021-05-07 16:03:38,660] Trial 3472 pruned.
[I 2021-05-07 16:03:38,923] Trial 3473 pruned.
[I 2021-05-07 16:03:39,369] Trial 3474 pruned.
[I 2021-05-07 16:03:39,504] Trial 3475 pruned.
[I 2021-05-07 16:03:39,681] Trial 3476 pruned.
[I 2021-05-07 16:03:40,137] Trial 3477 pruned.
[I 2021-05-07 16:03:40,625] Trial 3478 pruned.
[I 2021-05-07 16:03:41,265] Trial 3479 pruned.
[I 2021-05-07 16:03:41,727] Trial 3480 pruned.
[I 2021-05-07 16:03:42,014] Trial 3481 pruned.
[I 2021-05-07 16:03:42,490] Trial 3482 pruned.
[I 2021-05-07 16:03:42,669] Trial 3483 pruned.
[I 2021-05-07 16:03:43,111] Trial 3484 pruned.
[I 2021-05-07 16:03:43,570] Trial 3485 pruned.
[I 2021-05-07 16:03:44,378] Trial 3486 pruned.
[I 2021-05-07 16:03:44,862] Trial 3487 pruned.
[I 2021-05-07 16:03:45,368] Trial 3488 pruned.
[I 2021-05-07 16:03:45,894] Trial 3489 pruned.
[I 2021-05-07 16:03:46,081] Trial 3490 pruned.
[I 2021-05-07 16:03:46,970] Trial 3491 pruned.
[I 2021-05-07 16:03:47,462] Trial 3492 pruned.
[I 2021-05-07 16:03:47,921] Trial 3493 pruned.
[I 2021-05-07 16:03:48,367] Trial 3494 pruned.
[I 2021-05-07 16:03:48,640] Trial 3495 pruned.
[I 2021-05-07 16:03:49,086] Trial 3496 pruned.
[I 2021-05-07 16:03:49,279] Trial 3497 pruned.
[I 2021-05-07 16:03:49,756] Trial 3498 pruned.
[I 2021-05-07 16:03:50,199] Trial 3499 pruned.
[I 2021-05-07 16:03:50,653] Trial 3500 pruned.
[I 2021-05-07 16:03:50,784] Trial 3501 pruned.
[I 2021-05-07 16:03:51,696] Trial 3502 pruned.
[I 2021-05-07 16:03:51,983] Trial 3503 pruned.
[I 2021-05-07 16:03:52,428] Trial 3504 pruned.
[I 2021-05-07 16:03:52,612] Trial 3505 pruned.
[I 2021-05-07 16:03:53,488] Trial 3506 pruned.
[I 2021-05-07 16:03:53,957] Trial 3507 pruned.
[I 2021-05-07 16:03:54,766] Trial 3508 pruned.
[I 2021-05-07 16:03:55,202] Trial 3509 pruned.
[I 2021-05-07 16:03:55,579] Trial 3510 pruned.
[I 2021-05-07 16:03:56,032] Trial 3511 pruned.
[I 2021-05-07 16:03:56,214] Trial 3512 pruned.
[I 2021-05-07 16:03:56,708] Trial 3513 pruned.
[I 2021-05-07 16:03:57,185] Trial 3514 pruned.
[I 2021-05-07 16:03:57,648] Trial 3515 pruned.
[I 2021-05-07 16:03:58,101] Trial 3516 pruned.
[I 2021-05-07 16:03:58,375] Trial 3517 pruned.
[I 2021-05-07 16:03:58,815] Trial 3518 pruned.
[I 2021-05-07 16:03:59,011] Trial 3519 pruned.
[I 2021-05-07 16:03:59,845] Trial 3520 pruned.
[I 2021-05-07 16:04:45,960] Trial 3521 finished with value: 157.6289520263672 and parameters: {'lr': 0.0015616839168463073, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 676, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'linear', 'HL2_ac_fn': 'relu'}. Best is trial 3521 with value: 157.6289520263672.
[I 2021-05-07 16:04:46,772] Trial 3522 pruned.
[I 2021-05-07 16:04:47,839] Trial 3523 pruned.
[I 2021-05-07 16:04:48,387] Trial 3524 pruned.
[I 2021-05-07 16:04:49,891] Trial 3525 pruned.
[I 2021-05-07 16:04:50,100] Trial 3526 pruned.
[I 2021-05-07 16:04:50,648] Trial 3527 pruned.
[I 2021-05-07 16:04:50,976] Trial 3528 pruned.
[I 2021-05-07 16:04:51,525] Trial 3529 pruned.
[I 2021-05-07 16:04:52,061] Trial 3530 pruned.
[I 2021-05-07 16:04:52,215] Trial 3531 pruned.
[I 2021-05-07 16:04:52,762] Trial 3532 pruned.
[I 2021-05-07 16:04:53,845] Trial 3533 pruned.
[I 2021-05-07 16:04:54,179] Trial 3534 pruned.
[I 2021-05-07 16:04:54,396] Trial 3535 pruned.
[I 2021-05-07 16:04:54,943] Trial 3536 pruned.
[I 2021-05-07 16:04:55,393] Trial 3537 pruned.
[I 2021-05-07 16:04:55,945] Trial 3538 pruned.
[I 2021-05-07 16:04:56,396] Trial 3539 pruned.
[I 2021-05-07 16:04:56,852] Trial 3540 pruned.
[I 2021-05-07 16:04:57,120] Trial 3541 pruned.
[I 2021-05-07 16:04:57,328] Trial 3542 pruned.
[I 2021-05-07 16:04:57,887] Trial 3543 pruned.
[I 2021-05-07 16:04:58,889] Trial 3544 pruned.
[I 2021-05-07 16:04:59,350] Trial 3545 pruned.
[I 2021-05-07 16:04:59,799] Trial 3546 pruned.
[I 2021-05-07 16:05:00,353] Trial 3547 pruned.
[I 2021-05-07 16:05:00,669] Trial 3548 pruned.
[I 2021-05-07 16:05:00,880] Trial 3549 pruned.
[I 2021-05-07 16:05:01,436] Trial 3550 pruned.
[I 2021-05-07 16:05:01,882] Trial 3551 pruned.
[I 2021-05-07 16:05:02,443] Trial 3552 pruned.
[I 2021-05-07 16:05:02,984] Trial 3553 pruned.
[I 2021-05-07 16:05:03,988] Trial 3554 pruned.
[I 2021-05-07 16:05:04,310] Trial 3555 pruned.
[I 2021-05-07 16:05:04,519] Trial 3556 pruned.
[I 2021-05-07 16:05:05,069] Trial 3557 pruned.
[I 2021-05-07 16:05:05,516] Trial 3558 pruned.
[I 2021-05-07 16:05:06,065] Trial 3559 pruned.
[I 2021-05-07 16:05:06,221] Trial 3560 pruned.
[I 2021-05-07 16:05:06,778] Trial 3561 pruned.
[I 2021-05-07 16:05:07,779] Trial 3562 pruned.
[I 2021-05-07 16:05:08,100] Trial 3563 pruned.
[I 2021-05-07 16:05:08,280] Trial 3564 pruned.
[I 2021-05-07 16:05:08,842] Trial 3565 pruned.
[I 2021-05-07 16:05:09,847] Trial 3566 pruned.
[I 2021-05-07 16:05:10,396] Trial 3567 pruned.
[I 2021-05-07 16:05:10,943] Trial 3568 pruned.
[I 2021-05-07 16:05:11,395] Trial 3569 pruned.
[I 2021-05-07 16:05:11,960] Trial 3570 pruned.
[I 2021-05-07 16:05:12,171] Trial 3571 pruned.
[I 2021-05-07 16:05:12,722] Trial 3572 pruned.
[I 2021-05-07 16:05:14,173] Trial 3573 pruned.
[I 2021-05-07 16:05:14,624] Trial 3574 pruned.
[I 2021-05-07 16:05:15,084] Trial 3575 pruned.
[I 2021-05-07 16:05:15,541] Trial 3576 pruned.
[I 2021-05-07 16:05:15,817] Trial 3577 pruned.
[I 2021-05-07 16:05:15,996] Trial 3578 pruned.
[I 2021-05-07 16:05:16,832] Trial 3579 pruned.
[I 2021-05-07 16:05:17,283] Trial 3580 pruned.
[I 2021-05-07 16:05:17,831] Trial 3581 pruned.
[I 2021-05-07 16:05:18,293] Trial 3582 pruned.
[I 2021-05-07 16:05:18,744] Trial 3583 pruned.
[I 2021-05-07 16:05:18,937] Trial 3584 pruned.
[I 2021-05-07 16:05:19,392] Trial 3585 pruned.
[I 2021-05-07 16:05:19,666] Trial 3586 pruned.
[I 2021-05-07 16:05:20,172] Trial 3587 pruned.
[I 2021-05-07 16:05:21,454] Trial 3588 pruned.
[I 2021-05-07 16:05:21,912] Trial 3589 pruned.
[I 2021-05-07 16:05:22,047] Trial 3590 pruned.
[I 2021-05-07 16:05:22,858] Trial 3591 pruned.
[I 2021-05-07 16:05:23,131] Trial 3592 pruned.
[I 2021-05-07 16:05:23,311] Trial 3593 pruned.
[I 2021-05-07 16:05:24,611] Trial 3594 pruned.
[I 2021-05-07 16:05:25,061] Trial 3595 pruned.
[I 2021-05-07 16:05:25,513] Trial 3596 pruned.
[I 2021-05-07 16:05:26,005] Trial 3597 pruned.
[I 2021-05-07 16:05:26,591] Trial 3598 pruned.
[I 2021-05-07 16:05:26,887] Trial 3599 pruned.
[I 2021-05-07 16:05:27,068] Trial 3600 pruned.
[I 2021-05-07 16:05:27,518] Trial 3601 pruned.
[I 2021-05-07 16:05:27,979] Trial 3602 pruned.
[I 2021-05-07 16:05:28,428] Trial 3603 pruned.
[I 2021-05-07 16:05:28,906] Trial 3604 pruned.
[I 2021-05-07 16:05:29,388] Trial 3605 pruned.
[I 2021-05-07 16:05:29,655] Trial 3606 pruned.
[I 2021-05-07 16:05:29,853] Trial 3607 pruned.
[I 2021-05-07 16:05:30,303] Trial 3608 pruned.
[I 2021-05-07 16:05:30,781] Trial 3609 pruned.
[I 2021-05-07 16:05:31,231] Trial 3610 pruned.
[I 2021-05-07 16:05:32,139] Trial 3611 pruned.
[I 2021-05-07 16:05:32,620] Trial 3612 pruned.
[I 2021-05-07 16:05:33,176] Trial 3613 pruned.
[I 2021-05-07 16:05:33,364] Trial 3614 pruned.
[I 2021-05-07 16:05:33,813] Trial 3615 pruned.
[I 2021-05-07 16:05:34,270] Trial 3616 pruned.
[I 2021-05-07 16:05:35,091] Trial 3617 pruned.
[I 2021-05-07 16:05:35,225] Trial 3618 pruned.
[I 2021-05-07 16:05:35,682] Trial 3619 pruned.
[I 2021-05-07 16:05:36,181] Trial 3620 pruned.
[I 2021-05-07 16:05:36,471] Trial 3621 pruned.
[I 2021-05-07 16:05:36,651] Trial 3622 pruned.
[I 2021-05-07 16:05:37,138] Trial 3623 pruned.
[I 2021-05-07 16:05:37,598] Trial 3624 pruned.
[I 2021-05-07 16:05:38,092] Trial 3625 pruned.
[I 2021-05-07 16:05:38,547] Trial 3626 pruned.
[I 2021-05-07 16:05:39,026] Trial 3627 pruned.
[I 2021-05-07 16:05:39,293] Trial 3628 pruned.
[I 2021-05-07 16:05:39,750] Trial 3629 pruned.
[I 2021-05-07 16:05:39,973] Trial 3630 pruned.
[I 2021-05-07 16:05:40,815] Trial 3631 pruned.
[I 2021-05-07 16:05:41,259] Trial 3632 pruned.
[I 2021-05-07 16:05:42,157] Trial 3633 pruned.
[I 2021-05-07 16:05:42,651] Trial 3634 pruned.
[I 2021-05-07 16:05:42,922] Trial 3635 pruned.
[I 2021-05-07 16:05:43,747] Trial 3636 pruned.
[I 2021-05-07 16:05:43,943] Trial 3637 pruned.
[I 2021-05-07 16:05:44,812] Trial 3638 pruned.
[I 2021-05-07 16:05:45,263] Trial 3639 pruned.
[I 2021-05-07 16:05:45,713] Trial 3640 pruned.
[I 2021-05-07 16:05:46,207] Trial 3641 pruned.
[I 2021-05-07 16:05:46,489] Trial 3642 pruned.
[I 2021-05-07 16:05:46,946] Trial 3643 pruned.
[I 2021-05-07 16:05:47,133] Trial 3644 pruned.
[I 2021-05-07 16:05:47,592] Trial 3645 pruned.
[I 2021-05-07 16:05:48,078] Trial 3646 pruned.
[I 2021-05-07 16:05:48,529] Trial 3647 pruned.
[I 2021-05-07 16:05:48,678] Trial 3648 pruned.
[I 2021-05-07 16:05:50,759] Trial 3649 pruned.
[I 2021-05-07 16:05:51,217] Trial 3650 pruned.
[I 2021-05-07 16:05:51,674] Trial 3651 pruned.
[I 2021-05-07 16:05:51,856] Trial 3652 pruned.
[I 2021-05-07 16:05:52,354] Trial 3653 pruned.
[I 2021-05-07 16:05:52,907] Trial 3654 pruned.
[I 2021-05-07 16:05:53,718] Trial 3655 pruned.
[I 2021-05-07 16:05:54,178] Trial 3656 pruned.
[I 2021-05-07 16:05:54,678] Trial 3657 pruned.
[I 2021-05-07 16:05:55,564] Trial 3658 pruned.
[I 2021-05-07 16:05:55,755] Trial 3659 pruned.
[I 2021-05-07 16:05:56,202] Trial 3660 pruned.
[I 2021-05-07 16:05:56,659] Trial 3661 pruned.
[I 2021-05-07 16:05:57,106] Trial 3662 pruned.
[I 2021-05-07 16:05:57,556] Trial 3663 pruned.
[I 2021-05-07 16:05:57,895] Trial 3664 pruned.
[I 2021-05-07 16:05:58,391] Trial 3665 pruned.
[I 2021-05-07 16:05:58,570] Trial 3666 pruned.
[I 2021-05-07 16:05:59,387] Trial 3667 pruned.
[I 2021-05-07 16:06:00,601] Trial 3668 pruned.
[I 2021-05-07 16:06:01,094] Trial 3669 pruned.
[I 2021-05-07 16:06:01,593] Trial 3670 pruned.
[I 2021-05-07 16:06:01,865] Trial 3671 pruned.
[I 2021-05-07 16:06:02,349] Trial 3672 pruned.
[I 2021-05-07 16:06:02,533] Trial 3673 pruned.
[I 2021-05-07 16:06:03,340] Trial 3674 pruned.
[I 2021-05-07 16:06:03,813] Trial 3675 pruned.
[I 2021-05-07 16:06:05,817] Trial 3676 pruned.
[I 2021-05-07 16:06:05,955] Trial 3677 pruned.
[I 2021-05-07 16:06:06,442] Trial 3678 pruned.
[I 2021-05-07 16:06:06,715] Trial 3679 pruned.
[I 2021-05-07 16:06:07,175] Trial 3680 pruned.
[I 2021-05-07 16:06:07,357] Trial 3681 pruned.
[I 2021-05-07 16:06:07,807] Trial 3682 pruned.
[I 2021-05-07 16:06:08,270] Trial 3683 pruned.
[I 2021-05-07 16:06:08,758] Trial 3684 pruned.
[I 2021-05-07 16:06:09,307] Trial 3685 pruned.
[I 2021-05-07 16:06:09,600] Trial 3686 pruned.
[I 2021-05-07 16:06:10,100] Trial 3687 pruned.
[I 2021-05-07 16:06:10,295] Trial 3688 pruned.
[I 2021-05-07 16:06:10,774] Trial 3689 pruned.
[I 2021-05-07 16:06:11,224] Trial 3690 pruned.
[I 2021-05-07 16:06:11,725] Trial 3691 pruned.
[I 2021-05-07 16:06:12,563] Trial 3692 pruned.
[I 2021-05-07 16:06:13,754] Trial 3693 pruned.
[I 2021-05-07 16:06:14,208] Trial 3694 pruned.
[I 2021-05-07 16:06:14,391] Trial 3695 pruned.
[I 2021-05-07 16:06:14,844] Trial 3696 pruned.
[I 2021-05-07 16:06:15,304] Trial 3697 pruned.
[I 2021-05-07 16:06:16,393] Trial 3698 pruned.
[I 2021-05-07 16:06:16,892] Trial 3699 pruned.
[I 2021-05-07 16:06:17,184] Trial 3700 pruned.
[I 2021-05-07 16:06:17,659] Trial 3701 pruned.
[I 2021-05-07 16:06:17,844] Trial 3702 pruned.
[I 2021-05-07 16:06:18,299] Trial 3703 pruned.
[I 2021-05-07 16:06:18,797] Trial 3704 pruned.
[I 2021-05-07 16:06:19,691] Trial 3705 pruned.
[I 2021-05-07 16:06:19,827] Trial 3706 pruned.
[I 2021-05-07 16:06:20,327] Trial 3707 pruned.
[I 2021-05-07 16:06:20,599] Trial 3708 pruned.
[I 2021-05-07 16:06:21,081] Trial 3709 pruned.
[I 2021-05-07 16:06:21,266] Trial 3710 pruned.
[I 2021-05-07 16:06:21,740] Trial 3711 pruned.
[I 2021-05-07 16:06:22,201] Trial 3712 pruned.
[I 2021-05-07 16:06:22,663] Trial 3713 pruned.
[I 2021-05-07 16:06:23,135] Trial 3714 pruned.
[I 2021-05-07 16:06:23,465] Trial 3715 pruned.
[I 2021-05-07 16:06:23,919] Trial 3716 pruned.
[I 2021-05-07 16:06:24,104] Trial 3717 pruned.
[I 2021-05-07 16:06:24,545] Trial 3718 pruned.
[I 2021-05-07 16:06:25,052] Trial 3719 pruned.
[I 2021-05-07 16:06:25,540] Trial 3720 pruned.
[I 2021-05-07 16:06:26,029] Trial 3721 pruned.
[I 2021-05-07 16:06:26,492] Trial 3722 pruned.
[I 2021-05-07 16:06:26,783] Trial 3723 pruned.
[I 2021-05-07 16:06:26,983] Trial 3724 pruned.
[I 2021-05-07 16:06:27,434] Trial 3725 pruned.
[I 2021-05-07 16:06:27,886] Trial 3726 pruned.
[I 2021-05-07 16:06:28,349] Trial 3727 pruned.
[I 2021-05-07 16:06:28,796] Trial 3728 pruned.
[I 2021-05-07 16:06:29,078] Trial 3729 pruned.
[I 2021-05-07 16:06:29,536] Trial 3730 pruned.
[I 2021-05-07 16:06:29,732] Trial 3731 pruned.
[I 2021-05-07 16:06:30,655] Trial 3732 pruned.
[I 2021-05-07 16:06:31,133] Trial 3733 pruned.
[I 2021-05-07 16:06:32,621] Trial 3734 pruned.
[I 2021-05-07 16:06:32,759] Trial 3735 pruned.
[I 2021-05-07 16:06:33,206] Trial 3736 pruned.
[I 2021-05-07 16:06:33,494] Trial 3737 pruned.
[I 2021-05-07 16:06:33,942] Trial 3738 pruned.
[I 2021-05-07 16:06:34,143] Trial 3739 pruned.
[I 2021-05-07 16:06:34,594] Trial 3740 pruned.
[I 2021-05-07 16:06:35,039] Trial 3741 pruned.
[I 2021-05-07 16:06:35,531] Trial 3742 pruned.
[I 2021-05-07 16:06:35,981] Trial 3743 pruned.
[I 2021-05-07 16:06:36,277] Trial 3744 pruned.
[I 2021-05-07 16:06:37,090] Trial 3745 pruned.
[I 2021-05-07 16:06:37,289] Trial 3746 pruned.
[I 2021-05-07 16:06:38,109] Trial 3747 pruned.
[I 2021-05-07 16:06:38,597] Trial 3748 pruned.
[I 2021-05-07 16:06:39,103] Trial 3749 pruned.
[I 2021-05-07 16:06:39,554] Trial 3750 pruned.
[I 2021-05-07 16:06:39,885] Trial 3751 pruned.
[I 2021-05-07 16:06:40,360] Trial 3752 pruned.
[I 2021-05-07 16:06:40,812] Trial 3753 pruned.
[I 2021-05-07 16:06:40,999] Trial 3754 pruned.
[I 2021-05-07 16:06:41,812] Trial 3755 pruned.
[I 2021-05-07 16:06:42,304] Trial 3756 pruned.
[I 2021-05-07 16:06:42,768] Trial 3757 pruned.
[I 2021-05-07 16:06:43,042] Trial 3758 pruned.
[I 2021-05-07 16:06:43,498] Trial 3759 pruned.
[I 2021-05-07 16:06:43,700] Trial 3760 pruned.
[I 2021-05-07 16:06:44,188] Trial 3761 pruned.
[I 2021-05-07 16:06:44,677] Trial 3762 pruned.
[I 2021-05-07 16:06:45,168] Trial 3763 pruned.
[I 2021-05-07 16:06:45,626] Trial 3764 pruned.
[I 2021-05-07 16:06:45,805] Trial 3765 pruned.
[I 2021-05-07 16:06:46,082] Trial 3766 pruned.
[I 2021-05-07 16:06:46,963] Trial 3767 pruned.
[I 2021-05-07 16:06:47,147] Trial 3768 pruned.
[I 2021-05-07 16:06:47,609] Trial 3769 pruned.
[I 2021-05-07 16:06:48,173] Trial 3770 pruned.
[I 2021-05-07 16:06:49,383] Trial 3771 pruned.
[I 2021-05-07 16:06:49,867] Trial 3772 pruned.
[I 2021-05-07 16:06:50,164] Trial 3773 pruned.
[I 2021-05-07 16:06:50,602] Trial 3774 pruned.
[I 2021-05-07 16:06:50,786] Trial 3775 pruned.
[I 2021-05-07 16:06:51,249] Trial 3776 pruned.
[I 2021-05-07 16:06:51,726] Trial 3777 pruned.
[I 2021-05-07 16:06:52,225] Trial 3778 pruned.
[I 2021-05-07 16:06:53,134] Trial 3779 pruned.
[I 2021-05-07 16:06:53,406] Trial 3780 pruned.
[I 2021-05-07 16:06:53,900] Trial 3781 pruned.
[I 2021-05-07 16:06:54,083] Trial 3782 pruned.
[I 2021-05-07 16:06:54,566] Trial 3783 pruned.
[I 2021-05-07 16:06:55,012] Trial 3784 pruned.
[I 2021-05-07 16:06:55,608] Trial 3785 pruned.
[I 2021-05-07 16:06:56,070] Trial 3786 pruned.
[I 2021-05-07 16:06:56,343] Trial 3787 pruned.
[I 2021-05-07 16:06:56,844] Trial 3788 pruned.
[I 2021-05-07 16:06:57,028] Trial 3789 pruned.
[I 2021-05-07 16:06:57,502] Trial 3790 pruned.
[I 2021-05-07 16:06:58,362] Trial 3791 pruned.
[I 2021-05-07 16:06:58,848] Trial 3792 pruned.
[I 2021-05-07 16:06:59,309] Trial 3793 pruned.
[I 2021-05-07 16:06:59,582] Trial 3794 pruned.
[I 2021-05-07 16:06:59,729] Trial 3795 pruned.
[I 2021-05-07 16:07:00,232] Trial 3796 pruned.
[I 2021-05-07 16:07:00,687] Trial 3797 pruned.
[I 2021-05-07 16:07:00,876] Trial 3798 pruned.
[I 2021-05-07 16:07:01,362] Trial 3799 pruned.
[I 2021-05-07 16:07:02,195] Trial 3800 pruned.
[I 2021-05-07 16:07:02,687] Trial 3801 pruned.
[I 2021-05-07 16:07:03,061] Trial 3802 pruned.
[I 2021-05-07 16:07:03,625] Trial 3803 pruned.
[I 2021-05-07 16:07:03,809] Trial 3804 pruned.
[I 2021-05-07 16:07:04,311] Trial 3805 pruned.
[I 2021-05-07 16:07:04,759] Trial 3806 pruned.
[I 2021-05-07 16:07:05,237] Trial 3807 pruned.
[I 2021-05-07 16:07:05,717] Trial 3808 pruned.
[I 2021-05-07 16:07:06,004] Trial 3809 pruned.
[I 2021-05-07 16:07:06,465] Trial 3810 pruned.
[I 2021-05-07 16:07:06,649] Trial 3811 pruned.
[I 2021-05-07 16:07:07,143] Trial 3812 pruned.
[I 2021-05-07 16:07:08,049] Trial 3813 pruned.
[I 2021-05-07 16:07:08,501] Trial 3814 pruned.
[I 2021-05-07 16:07:08,999] Trial 3815 pruned.
[I 2021-05-07 16:07:09,270] Trial 3816 pruned.
[I 2021-05-07 16:07:09,860] Trial 3817 pruned.
[I 2021-05-07 16:07:10,046] Trial 3818 pruned.
[I 2021-05-07 16:07:10,533] Trial 3819 pruned.
[I 2021-05-07 16:07:11,395] Trial 3820 pruned.
[I 2021-05-07 16:07:11,841] Trial 3821 pruned.
[I 2021-05-07 16:07:12,319] Trial 3822 pruned.
[I 2021-05-07 16:07:12,462] Trial 3823 pruned.
[I 2021-05-07 16:07:12,735] Trial 3824 pruned.
[I 2021-05-07 16:07:13,200] Trial 3825 pruned.
[I 2021-05-07 16:07:13,385] Trial 3826 pruned.
[I 2021-05-07 16:07:14,200] Trial 3827 pruned.
[I 2021-05-07 16:07:14,652] Trial 3828 pruned.
[I 2021-05-07 16:07:15,536] Trial 3829 pruned.
[I 2021-05-07 16:07:16,046] Trial 3830 pruned.
[I 2021-05-07 16:07:16,324] Trial 3831 pruned.
[I 2021-05-07 16:07:16,825] Trial 3832 pruned.
[I 2021-05-07 16:07:17,008] Trial 3833 pruned.
[I 2021-05-07 16:07:17,827] Trial 3834 pruned.
[I 2021-05-07 16:07:18,320] Trial 3835 pruned.
[I 2021-05-07 16:07:18,914] Trial 3836 pruned.
[I 2021-05-07 16:07:19,818] Trial 3837 pruned.
[I 2021-05-07 16:07:20,091] Trial 3838 pruned.
[I 2021-05-07 16:07:20,543] Trial 3839 pruned.
[I 2021-05-07 16:07:21,003] Trial 3840 pruned.
[I 2021-05-07 16:07:21,189] Trial 3841 pruned.
[I 2021-05-07 16:07:21,693] Trial 3842 pruned.
[I 2021-05-07 16:07:22,142] Trial 3843 pruned.
[I 2021-05-07 16:07:22,624] Trial 3844 pruned.
[I 2021-05-07 16:07:22,905] Trial 3845 pruned.
[I 2021-05-07 16:07:23,719] Trial 3846 pruned.
[I 2021-05-07 16:07:24,184] Trial 3847 pruned.
[I 2021-05-07 16:07:24,368] Trial 3848 pruned.
[I 2021-05-07 16:07:24,843] Trial 3849 pruned.
[I 2021-05-07 16:07:25,336] Trial 3850 pruned.
[I 2021-05-07 16:07:25,486] Trial 3851 pruned.
[I 2021-05-07 16:08:20,658] Trial 3852 finished with value: 157.83926391601562 and parameters: {'lr': 0.0019234417854082381, 'batch_size': 16, 'n_layers': 4, 'neurons_HL1': 702, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'relu'}. Best is trial 3521 with value: 157.6289520263672.
[I 2021-05-07 16:08:21,321] Trial 3853 pruned.
[I 2021-05-07 16:08:21,967] Trial 3854 pruned.
[I 2021-05-07 16:08:22,627] Trial 3855 pruned.
[I 2021-05-07 16:08:23,293] Trial 3856 pruned.
[I 2021-05-07 16:08:23,955] Trial 3857 pruned.
[I 2021-05-07 16:08:25,717] Trial 3858 pruned.
[I 2021-05-07 16:08:26,379] Trial 3859 pruned.
[I 2021-05-07 16:08:27,032] Trial 3860 pruned.
[I 2021-05-07 16:08:27,689] Trial 3861 pruned.
[I 2021-05-07 16:08:28,350] Trial 3862 pruned.
[I 2021-05-07 16:08:30,113] Trial 3863 pruned.
[I 2021-05-07 16:08:30,772] Trial 3864 pruned.
[I 2021-05-07 16:08:31,431] Trial 3865 pruned.
[I 2021-05-07 16:08:32,090] Trial 3866 pruned.
[I 2021-05-07 16:08:33,306] Trial 3867 pruned.
[I 2021-05-07 16:08:34,503] Trial 3868 pruned.
[I 2021-05-07 16:08:35,162] Trial 3869 pruned.
[I 2021-05-07 16:08:35,813] Trial 3870 pruned.
[I 2021-05-07 16:08:36,469] Trial 3871 pruned.
[I 2021-05-07 16:08:37,129] Trial 3872 pruned.
[I 2021-05-07 16:08:38,340] Trial 3873 pruned.
[I 2021-05-07 16:08:38,998] Trial 3874 pruned.
[I 2021-05-07 16:08:40,201] Trial 3875 pruned.
[I 2021-05-07 16:08:41,219] Trial 3876 pruned.
[I 2021-05-07 16:08:41,771] Trial 3877 pruned.
[I 2021-05-07 16:08:42,429] Trial 3878 pruned.
[I 2021-05-07 16:08:43,439] Trial 3879 pruned.
[I 2021-05-07 16:08:44,645] Trial 3880 pruned.
[I 2021-05-07 16:08:45,303] Trial 3881 pruned.
[I 2021-05-07 16:08:45,770] Trial 3882 pruned.
[I 2021-05-07 16:08:46,316] Trial 3883 pruned.
[I 2021-05-07 16:08:47,789] Trial 3884 pruned.
[I 2021-05-07 16:08:48,447] Trial 3885 pruned.
[I 2021-05-07 16:08:49,101] Trial 3886 pruned.
[I 2021-05-07 16:08:50,311] Trial 3887 pruned.
[I 2021-05-07 16:08:50,970] Trial 3888 pruned.
[I 2021-05-07 16:08:51,631] Trial 3889 pruned.
[I 2021-05-07 16:08:52,288] Trial 3890 pruned.
[I 2021-05-07 16:08:52,845] Trial 3891 pruned.
[I 2021-05-07 16:08:53,507] Trial 3892 pruned.
[I 2021-05-07 16:08:54,066] Trial 3893 pruned.
[I 2021-05-07 16:08:54,726] Trial 3894 pruned.
[I 2021-05-07 16:08:55,195] Trial 3895 pruned.
[I 2021-05-07 16:08:56,399] Trial 3896 pruned.
[I 2021-05-07 16:08:57,610] Trial 3897 pruned.
[I 2021-05-07 16:08:58,621] Trial 3898 pruned.
[I 2021-05-07 16:08:59,633] Trial 3899 pruned.
[I 2021-05-07 16:09:00,292] Trial 3900 pruned.
[I 2021-05-07 16:09:00,953] Trial 3901 pruned.
[I 2021-05-07 16:09:02,158] Trial 3902 pruned.
[I 2021-05-07 16:09:02,812] Trial 3903 pruned.
[I 2021-05-07 16:09:03,473] Trial 3904 pruned.
[I 2021-05-07 16:09:04,131] Trial 3905 pruned.
[I 2021-05-07 16:09:05,140] Trial 3906 pruned.
[I 2021-05-07 16:09:06,351] Trial 3907 pruned.
[I 2021-05-07 16:09:07,001] Trial 3908 pruned.
[I 2021-05-07 16:09:08,013] Trial 3909 pruned.
[I 2021-05-07 16:09:08,481] Trial 3910 pruned.
[I 2021-05-07 16:09:09,686] Trial 3911 pruned.
[I 2021-05-07 16:09:10,922] Trial 3912 pruned.
[I 2021-05-07 16:09:11,477] Trial 3913 pruned.
[I 2021-05-07 16:09:12,705] Trial 3914 pruned.
[I 2021-05-07 16:09:13,375] Trial 3915 pruned.
[I 2021-05-07 16:10:08,641] Trial 3916 finished with value: 159.53048706054688 and parameters: {'lr': 0.002346570374530504, 'batch_size': 16, 'n_layers': 4, 'neurons_HL1': 692, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear', 'HL3_ac_fn': 'relu'}. Best is trial 3521 with value: 157.6289520263672.
[I 2021-05-07 16:10:09,850] Trial 3917 pruned.
[I 2021-05-07 16:10:10,506] Trial 3918 pruned.
[I 2021-05-07 16:10:11,159] Trial 3919 pruned.
[I 2021-05-07 16:10:11,491] Trial 3920 pruned.
[I 2021-05-07 16:10:12,155] Trial 3921 pruned.
[I 2021-05-07 16:10:12,816] Trial 3922 pruned.
[I 2021-05-07 16:10:13,473] Trial 3923 pruned.
[I 2021-05-07 16:10:14,135] Trial 3924 pruned.
[I 2021-05-07 16:10:14,797] Trial 3925 pruned.
[I 2021-05-07 16:10:15,183] Trial 3926 pruned.
[I 2021-05-07 16:10:15,849] Trial 3927 pruned.
[I 2021-05-07 16:10:16,520] Trial 3928 pruned.
[I 2021-05-07 16:10:17,751] Trial 3929 pruned.
[I 2021-05-07 16:10:18,414] Trial 3930 pruned.
[I 2021-05-07 16:10:19,630] Trial 3931 pruned.
[I 2021-05-07 16:10:20,016] Trial 3932 pruned.
[I 2021-05-07 16:10:20,665] Trial 3933 pruned.
[I 2021-05-07 16:10:21,321] Trial 3934 pruned.
[I 2021-05-07 16:10:21,975] Trial 3935 pruned.
[I 2021-05-07 16:10:23,566] Trial 3936 pruned.
[I 2021-05-07 16:10:24,230] Trial 3937 pruned.
[I 2021-05-07 16:10:24,881] Trial 3938 pruned.
[I 2021-05-07 16:10:25,272] Trial 3939 pruned.
[I 2021-05-07 16:10:25,936] Trial 3940 pruned.
[I 2021-05-07 16:10:29,316] Trial 3941 pruned.
[I 2021-05-07 16:10:29,983] Trial 3942 pruned.
[I 2021-05-07 16:10:30,639] Trial 3943 pruned.
[I 2021-05-07 16:10:30,826] Trial 3944 pruned.
[I 2021-05-07 16:10:31,468] Trial 3945 pruned.
[I 2021-05-07 16:10:31,863] Trial 3946 pruned.
[I 2021-05-07 16:10:32,522] Trial 3947 pruned.
[I 2021-05-07 16:10:33,174] Trial 3948 pruned.
[I 2021-05-07 16:10:33,833] Trial 3949 pruned.
[I 2021-05-07 16:10:34,483] Trial 3950 pruned.
[I 2021-05-07 16:10:35,147] Trial 3951 pruned.
[I 2021-05-07 16:10:35,535] Trial 3952 pruned.
[I 2021-05-07 16:10:36,751] Trial 3953 pruned.
[I 2021-05-07 16:10:37,302] Trial 3954 pruned.
[I 2021-05-07 16:10:37,768] Trial 3955 pruned.
[I 2021-05-07 16:10:38,427] Trial 3956 pruned.
[I 2021-05-07 16:10:39,080] Trial 3957 pruned.
[I 2021-05-07 16:10:39,470] Trial 3958 pruned.
[I 2021-05-07 16:10:40,127] Trial 3959 pruned.
[I 2021-05-07 16:10:41,309] Trial 3960 pruned.
[I 2021-05-07 16:10:41,975] Trial 3961 pruned.
[I 2021-05-07 16:10:42,635] Trial 3962 pruned.
[I 2021-05-07 16:10:44,386] Trial 3963 pruned.
[I 2021-05-07 16:10:45,042] Trial 3964 pruned.
[I 2021-05-07 16:10:45,422] Trial 3965 pruned.
[I 2021-05-07 16:10:45,891] Trial 3966 pruned.
[I 2021-05-07 16:10:47,094] Trial 3967 pruned.
[I 2021-05-07 16:10:48,312] Trial 3968 pruned.
[I 2021-05-07 16:10:48,867] Trial 3969 pruned.
[I 2021-05-07 16:10:50,083] Trial 3970 pruned.
[I 2021-05-07 16:10:50,467] Trial 3971 pruned.
[I 2021-05-07 16:10:51,140] Trial 3972 pruned.
[I 2021-05-07 16:10:51,323] Trial 3973 pruned.
[I 2021-05-07 16:10:51,994] Trial 3974 pruned.
[I 2021-05-07 16:11:41,908] Trial 3975 finished with value: 162.5379180908203 and parameters: {'lr': 0.001923449206146624, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 956, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 3521 with value: 157.6289520263672.
[I 2021-05-07 16:11:42,514] Trial 3976 pruned.
[I 2021-05-07 16:11:43,626] Trial 3977 pruned.
[I 2021-05-07 16:11:44,231] Trial 3978 pruned.
[I 2021-05-07 16:11:44,586] Trial 3979 pruned.
[I 2021-05-07 16:11:46,187] Trial 3980 pruned.
[I 2021-05-07 16:11:47,780] Trial 3981 pruned.
[I 2021-05-07 16:11:48,391] Trial 3982 pruned.
[I 2021-05-07 16:11:48,995] Trial 3983 pruned.
[I 2021-05-07 16:11:49,593] Trial 3984 pruned.
[I 2021-05-07 16:11:49,946] Trial 3985 pruned.
[I 2021-05-07 16:11:50,555] Trial 3986 pruned.
[I 2021-05-07 16:12:40,287] Trial 3987 finished with value: 160.49578857421875 and parameters: {'lr': 0.0014352764746750437, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 954, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 3521 with value: 157.6289520263672.
[I 2021-05-07 16:12:41,405] Trial 3988 pruned.
[I 2021-05-07 16:12:42,508] Trial 3989 pruned.
[I 2021-05-07 16:12:43,633] Trial 3990 pruned.
[I 2021-05-07 16:12:44,230] Trial 3991 pruned.
[I 2021-05-07 16:12:47,391] Trial 3992 pruned.
[I 2021-05-07 16:12:47,990] Trial 3993 pruned.
[I 2021-05-07 16:12:49,095] Trial 3994 pruned.
[I 2021-05-07 16:12:49,705] Trial 3995 pruned.
[I 2021-05-07 16:13:40,211] Trial 3996 finished with value: 159.5423126220703 and parameters: {'lr': 0.0015079068455494898, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 992, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 3521 with value: 157.6289520263672.
[I 2021-05-07 16:13:40,823] Trial 3997 pruned.
[I 2021-05-07 16:13:42,055] Trial 3998 pruned.
[I 2021-05-07 16:13:43,177] Trial 3999 pruned.
[I 2021-05-07 16:13:44,285] Trial 4000 pruned.
[I 2021-05-07 16:13:44,900] Trial 4001 pruned.
[I 2021-05-07 16:13:45,507] Trial 4002 pruned.
[I 2021-05-07 16:13:46,115] Trial 4003 pruned.
[I 2021-05-07 16:13:46,712] Trial 4004 pruned.
[I 2021-05-07 16:13:47,820] Trial 4005 pruned.
[I 2021-05-07 16:13:48,435] Trial 4006 pruned.
[I 2021-05-07 16:13:49,042] Trial 4007 pruned.
[I 2021-05-07 16:14:45,680] Trial 4008 finished with value: 162.51307678222656 and parameters: {'lr': 0.0015662141873263524, 'batch_size': 16, 'n_layers': 4, 'neurons_HL1': 1012, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu', 'HL3_ac_fn': 'relu'}. Best is trial 3521 with value: 157.6289520263672.
[I 2021-05-07 16:14:46,292] Trial 4009 pruned.
[I 2021-05-07 16:15:37,245] Trial 4010 finished with value: 172.43212890625 and parameters: {'lr': 0.0012762349476306328, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1000, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 3521 with value: 157.6289520263672.
[I 2021-05-07 16:15:37,853] Trial 4011 pruned.
[I 2021-05-07 16:15:38,456] Trial 4012 pruned.
[I 2021-05-07 16:16:29,666] Trial 4013 finished with value: 158.3479766845703 and parameters: {'lr': 0.0012381517825452383, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1024, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 3521 with value: 157.6289520263672.
[I 2021-05-07 16:16:30,279] Trial 4014 pruned.
[I 2021-05-07 16:16:30,891] Trial 4015 pruned.
[I 2021-05-07 16:16:31,506] Trial 4016 pruned.
[I 2021-05-07 16:16:32,118] Trial 4017 pruned.
[I 2021-05-07 16:17:23,019] Trial 4018 finished with value: 156.53628540039062 and parameters: {'lr': 0.0011612548928246734, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1002, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4018 with value: 156.53628540039062.
[I 2021-05-07 16:17:23,634] Trial 4019 pruned.
[I 2021-05-07 16:17:24,239] Trial 4020 pruned.
[I 2021-05-07 16:17:24,846] Trial 4021 pruned.
[I 2021-05-07 16:17:25,455] Trial 4022 pruned.
[I 2021-05-07 16:17:26,069] Trial 4023 pruned.
[I 2021-05-07 16:17:26,680] Trial 4024 pruned.
[I 2021-05-07 16:17:27,288] Trial 4025 pruned.
[I 2021-05-07 16:17:27,902] Trial 4026 pruned.
[I 2021-05-07 16:18:19,071] Trial 4027 finished with value: 157.1497039794922 and parameters: {'lr': 0.001079542022240627, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1024, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4018 with value: 156.53628540039062.
[I 2021-05-07 16:18:20,191] Trial 4028 pruned.
[I 2021-05-07 16:18:20,802] Trial 4029 pruned.
[I 2021-05-07 16:18:21,410] Trial 4030 pruned.
[I 2021-05-07 16:18:22,546] Trial 4031 pruned.
[I 2021-05-07 16:18:23,164] Trial 4032 pruned.
[I 2021-05-07 16:18:23,769] Trial 4033 pruned.
[I 2021-05-07 16:18:24,903] Trial 4034 pruned.
[I 2021-05-07 16:18:25,535] Trial 4035 pruned.
[I 2021-05-07 16:18:27,160] Trial 4036 pruned.
[I 2021-05-07 16:18:28,278] Trial 4037 pruned.
[I 2021-05-07 16:18:28,891] Trial 4038 pruned.
[I 2021-05-07 16:18:29,501] Trial 4039 pruned.
[I 2021-05-07 16:18:30,615] Trial 4040 pruned.
[I 2021-05-07 16:19:21,778] Trial 4041 finished with value: 165.09117126464844 and parameters: {'lr': 0.0010137163953595743, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1024, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4018 with value: 156.53628540039062.
[I 2021-05-07 16:19:22,395] Trial 4042 pruned.
[I 2021-05-07 16:19:23,006] Trial 4043 pruned.
[I 2021-05-07 16:19:23,620] Trial 4044 pruned.
[I 2021-05-07 16:19:24,234] Trial 4045 pruned.
[I 2021-05-07 16:19:24,843] Trial 4046 pruned.
[I 2021-05-07 16:19:25,456] Trial 4047 pruned.
[I 2021-05-07 16:19:26,582] Trial 4048 pruned.
[I 2021-05-07 16:19:27,192] Trial 4049 pruned.
[I 2021-05-07 16:19:27,792] Trial 4050 pruned.
[I 2021-05-07 16:19:28,407] Trial 4051 pruned.
[I 2021-05-07 16:19:29,020] Trial 4052 pruned.
[I 2021-05-07 16:19:29,628] Trial 4053 pruned.
[I 2021-05-07 16:19:30,746] Trial 4054 pruned.
[I 2021-05-07 16:19:31,859] Trial 4055 pruned.
[I 2021-05-07 16:19:32,984] Trial 4056 pruned.
[I 2021-05-07 16:19:34,607] Trial 4057 pruned.
[I 2021-05-07 16:19:35,215] Trial 4058 pruned.
[I 2021-05-07 16:19:36,336] Trial 4059 pruned.
[I 2021-05-07 16:19:36,946] Trial 4060 pruned.
[I 2021-05-07 16:19:37,561] Trial 4061 pruned.
[I 2021-05-07 16:19:38,172] Trial 4062 pruned.
[I 2021-05-07 16:20:28,837] Trial 4063 finished with value: 162.81016540527344 and parameters: {'lr': 0.0013275306527502958, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 994, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4018 with value: 156.53628540039062.
[I 2021-05-07 16:20:30,447] Trial 4064 pruned.
[I 2021-05-07 16:20:31,064] Trial 4065 pruned.
[I 2021-05-07 16:20:31,688] Trial 4066 pruned.
[I 2021-05-07 16:20:31,864] Trial 4067 pruned.
[I 2021-05-07 16:21:22,719] Trial 4068 finished with value: 160.80911254882812 and parameters: {'lr': 0.001077454922866405, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1002, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4018 with value: 156.53628540039062.
[I 2021-05-07 16:21:23,335] Trial 4069 pruned.
[I 2021-05-07 16:21:23,939] Trial 4070 pruned.
[I 2021-05-07 16:21:24,551] Trial 4071 pruned.
[I 2021-05-07 16:21:25,164] Trial 4072 pruned.
[I 2021-05-07 16:21:26,276] Trial 4073 pruned.
[I 2021-05-07 16:21:26,884] Trial 4074 pruned.
[I 2021-05-07 16:22:18,066] Trial 4075 finished with value: 158.1580047607422 and parameters: {'lr': 0.0011286790786705545, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1018, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4018 with value: 156.53628540039062.
[I 2021-05-07 16:22:19,197] Trial 4076 pruned.
[I 2021-05-07 16:22:20,301] Trial 4077 pruned.
[I 2021-05-07 16:22:20,917] Trial 4078 pruned.
[I 2021-05-07 16:22:21,543] Trial 4079 pruned.
[I 2021-05-07 16:22:22,156] Trial 4080 pruned.
[I 2021-05-07 16:22:23,280] Trial 4081 pruned.
[I 2021-05-07 16:22:23,892] Trial 4082 pruned.
[I 2021-05-07 16:22:24,511] Trial 4083 pruned.
[I 2021-05-07 16:22:25,120] Trial 4084 pruned.
[I 2021-05-07 16:22:25,739] Trial 4085 pruned.
[I 2021-05-07 16:22:26,355] Trial 4086 pruned.
[I 2021-05-07 16:22:26,962] Trial 4087 pruned.
[I 2021-05-07 16:22:32,676] Trial 4088 pruned.
[I 2021-05-07 16:22:33,290] Trial 4089 pruned.
[I 2021-05-07 16:22:33,903] Trial 4090 pruned.
[I 2021-05-07 16:22:34,079] Trial 4091 pruned.
[I 2021-05-07 16:22:35,206] Trial 4092 pruned.
[I 2021-05-07 16:22:35,825] Trial 4093 pruned.
[I 2021-05-07 16:22:36,439] Trial 4094 pruned.
[I 2021-05-07 16:22:37,053] Trial 4095 pruned.
[I 2021-05-07 16:22:37,669] Trial 4096 pruned.
[I 2021-05-07 16:22:38,286] Trial 4097 pruned.
[I 2021-05-07 16:22:39,385] Trial 4098 pruned.
[I 2021-05-07 16:22:40,994] Trial 4099 pruned.
[I 2021-05-07 16:22:41,603] Trial 4100 pruned.
[I 2021-05-07 16:22:42,214] Trial 4101 pruned.
[I 2021-05-07 16:22:42,827] Trial 4102 pruned.
[I 2021-05-07 16:22:43,439] Trial 4103 pruned.
[I 2021-05-07 16:23:34,364] Trial 4104 finished with value: 164.0568084716797 and parameters: {'lr': 0.001314576818473244, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1002, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4018 with value: 156.53628540039062.
[I 2021-05-07 16:23:34,965] Trial 4105 pruned.
[I 2021-05-07 16:23:35,582] Trial 4106 pruned.
[I 2021-05-07 16:23:36,191] Trial 4107 pruned.
[I 2021-05-07 16:23:36,806] Trial 4108 pruned.
[I 2021-05-07 16:23:37,412] Trial 4109 pruned.
[I 2021-05-07 16:23:38,028] Trial 4110 pruned.
[I 2021-05-07 16:23:38,645] Trial 4111 pruned.
[I 2021-05-07 16:23:39,250] Trial 4112 pruned.
[I 2021-05-07 16:23:39,859] Trial 4113 pruned.
[I 2021-05-07 16:23:40,036] Trial 4114 pruned.
[I 2021-05-07 16:23:41,142] Trial 4115 pruned.
[I 2021-05-07 16:23:41,751] Trial 4116 pruned.
[I 2021-05-07 16:23:42,362] Trial 4117 pruned.
[I 2021-05-07 16:23:42,973] Trial 4118 pruned.
[I 2021-05-07 16:23:43,582] Trial 4119 pruned.
[I 2021-05-07 16:23:44,195] Trial 4120 pruned.
[I 2021-05-07 16:23:44,806] Trial 4121 pruned.
[I 2021-05-07 16:24:35,203] Trial 4122 finished with value: 157.45028686523438 and parameters: {'lr': 0.001045771822134947, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 978, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4018 with value: 156.53628540039062.
[I 2021-05-07 16:24:35,819] Trial 4123 pruned.
[I 2021-05-07 16:24:36,435] Trial 4124 pruned.
[I 2021-05-07 16:24:37,047] Trial 4125 pruned.
[I 2021-05-07 16:24:37,652] Trial 4126 pruned.
[I 2021-05-07 16:24:38,264] Trial 4127 pruned.
[I 2021-05-07 16:24:38,875] Trial 4128 pruned.
[I 2021-05-07 16:24:39,983] Trial 4129 pruned.
[I 2021-05-07 16:24:40,596] Trial 4130 pruned.
[I 2021-05-07 16:24:41,206] Trial 4131 pruned.
[I 2021-05-07 16:24:41,823] Trial 4132 pruned.
[I 2021-05-07 16:24:42,428] Trial 4133 pruned.
[I 2021-05-07 16:24:43,045] Trial 4134 pruned.
[I 2021-05-07 16:24:43,664] Trial 4135 pruned.
[I 2021-05-07 16:25:34,546] Trial 4136 finished with value: 156.2919464111328 and parameters: {'lr': 0.0012074602248986163, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1002, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4136 with value: 156.2919464111328.
[I 2021-05-07 16:25:35,154] Trial 4137 pruned.
[I 2021-05-07 16:25:35,331] Trial 4138 pruned.
[I 2021-05-07 16:25:35,955] Trial 4139 pruned.
[I 2021-05-07 16:25:36,572] Trial 4140 pruned.
[I 2021-05-07 16:25:37,182] Trial 4141 pruned.
[I 2021-05-07 16:25:37,798] Trial 4142 pruned.
[I 2021-05-07 16:25:38,405] Trial 4143 pruned.
[I 2021-05-07 16:25:40,017] Trial 4144 pruned.
[I 2021-05-07 16:25:40,637] Trial 4145 pruned.
[I 2021-05-07 16:25:41,246] Trial 4146 pruned.
[I 2021-05-07 16:25:41,865] Trial 4147 pruned.
[I 2021-05-07 16:25:42,985] Trial 4148 pruned.
[I 2021-05-07 16:25:43,601] Trial 4149 pruned.
[I 2021-05-07 16:25:44,222] Trial 4150 pruned.
[I 2021-05-07 16:25:44,865] Trial 4151 pruned.
[I 2021-05-07 16:25:45,990] Trial 4152 pruned.
[I 2021-05-07 16:25:46,600] Trial 4153 pruned.
[I 2021-05-07 16:25:47,215] Trial 4154 pruned.
[I 2021-05-07 16:25:47,825] Trial 4155 pruned.
[I 2021-05-07 16:25:48,442] Trial 4156 pruned.
[I 2021-05-07 16:25:49,052] Trial 4157 pruned.
[I 2021-05-07 16:25:50,170] Trial 4158 pruned.
[I 2021-05-07 16:25:50,761] Trial 4159 pruned.
[I 2021-05-07 16:25:51,876] Trial 4160 pruned.
[I 2021-05-07 16:26:42,192] Trial 4161 finished with value: 156.7040252685547 and parameters: {'lr': 0.0011126228623519285, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 974, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4136 with value: 156.2919464111328.
[I 2021-05-07 16:26:42,798] Trial 4162 pruned.
[I 2021-05-07 16:26:43,414] Trial 4163 pruned.
[I 2021-05-07 16:26:44,029] Trial 4164 pruned.
[I 2021-05-07 16:26:44,632] Trial 4165 pruned.
[I 2021-05-07 16:26:45,239] Trial 4166 pruned.
[I 2021-05-07 16:26:46,367] Trial 4167 pruned.
[I 2021-05-07 16:27:36,653] Trial 4168 finished with value: 158.78932189941406 and parameters: {'lr': 0.001087203809525132, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 972, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4136 with value: 156.2919464111328.
[I 2021-05-07 16:27:37,262] Trial 4169 pruned.
[I 2021-05-07 16:27:38,377] Trial 4170 pruned.
[I 2021-05-07 16:27:38,981] Trial 4171 pruned.
[I 2021-05-07 16:27:39,590] Trial 4172 pruned.
[I 2021-05-07 16:27:40,208] Trial 4173 pruned.
[I 2021-05-07 16:27:40,816] Trial 4174 pruned.
[I 2021-05-07 16:27:41,433] Trial 4175 pruned.
[I 2021-05-07 16:27:42,035] Trial 4176 pruned.
[I 2021-05-07 16:27:43,155] Trial 4177 pruned.
[I 2021-05-07 16:27:43,766] Trial 4178 pruned.
[I 2021-05-07 16:27:44,374] Trial 4179 pruned.
[I 2021-05-07 16:27:44,989] Trial 4180 pruned.
[I 2021-05-07 16:27:46,117] Trial 4181 pruned.
[I 2021-05-07 16:27:46,726] Trial 4182 pruned.
[I 2021-05-07 16:27:47,837] Trial 4183 pruned.
[I 2021-05-07 16:27:48,452] Trial 4184 pruned.
[I 2021-05-07 16:27:49,067] Trial 4185 pruned.
[I 2021-05-07 16:27:50,187] Trial 4186 pruned.
[I 2021-05-07 16:27:50,792] Trial 4187 pruned.
[I 2021-05-07 16:27:51,396] Trial 4188 pruned.
[I 2021-05-07 16:27:52,998] Trial 4189 pruned.
[I 2021-05-07 16:27:53,608] Trial 4190 pruned.
[I 2021-05-07 16:27:54,216] Trial 4191 pruned.
[I 2021-05-07 16:27:54,831] Trial 4192 pruned.
[I 2021-05-07 16:27:55,439] Trial 4193 pruned.
[I 2021-05-07 16:27:56,045] Trial 4194 pruned.
[I 2021-05-07 16:27:56,664] Trial 4195 pruned.
[I 2021-05-07 16:27:57,276] Trial 4196 pruned.
[I 2021-05-07 16:27:57,895] Trial 4197 pruned.
[I 2021-05-07 16:27:58,500] Trial 4198 pruned.
[I 2021-05-07 16:27:59,112] Trial 4199 pruned.
[I 2021-05-07 16:28:00,227] Trial 4200 pruned.
[I 2021-05-07 16:28:00,830] Trial 4201 pruned.
[I 2021-05-07 16:28:01,435] Trial 4202 pruned.
[I 2021-05-07 16:28:52,303] Trial 4203 finished with value: 158.37330627441406 and parameters: {'lr': 0.0010042785002534616, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 996, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4136 with value: 156.2919464111328.
[I 2021-05-07 16:28:52,908] Trial 4204 pruned.
[I 2021-05-07 16:28:54,011] Trial 4205 pruned.
[I 2021-05-07 16:28:54,614] Trial 4206 pruned.
[I 2021-05-07 16:28:55,224] Trial 4207 pruned.
[I 2021-05-07 16:28:55,829] Trial 4208 pruned.
[I 2021-05-07 16:28:56,438] Trial 4209 pruned.
[I 2021-05-07 16:28:57,049] Trial 4210 pruned.
[I 2021-05-07 16:28:57,670] Trial 4211 pruned.
[I 2021-05-07 16:28:58,284] Trial 4212 pruned.
[I 2021-05-07 16:28:58,886] Trial 4213 pruned.
[I 2021-05-07 16:28:59,997] Trial 4214 pruned.
[I 2021-05-07 16:29:00,613] Trial 4215 pruned.
[I 2021-05-07 16:29:01,218] Trial 4216 pruned.
[I 2021-05-07 16:29:01,831] Trial 4217 pruned.
[I 2021-05-07 16:29:02,434] Trial 4218 pruned.
[I 2021-05-07 16:29:04,037] Trial 4219 pruned.
[I 2021-05-07 16:29:04,641] Trial 4220 pruned.
[I 2021-05-07 16:29:05,260] Trial 4221 pruned.
[I 2021-05-07 16:29:55,643] Trial 4222 finished with value: 162.34300231933594 and parameters: {'lr': 0.0011634946142685483, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 986, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4136 with value: 156.2919464111328.
[I 2021-05-07 16:29:56,748] Trial 4223 pruned.
[I 2021-05-07 16:29:57,367] Trial 4224 pruned.
[I 2021-05-07 16:29:58,980] Trial 4225 pruned.
[I 2021-05-07 16:29:59,595] Trial 4226 pruned.
[I 2021-05-07 16:30:00,233] Trial 4227 pruned.
[I 2021-05-07 16:30:51,288] Trial 4228 finished with value: 161.7392578125 and parameters: {'lr': 0.0012082767028841817, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1006, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4136 with value: 156.2919464111328.
[I 2021-05-07 16:30:51,903] Trial 4229 pruned.
[I 2021-05-07 16:30:52,516] Trial 4230 pruned.
[I 2021-05-07 16:30:53,641] Trial 4231 pruned.
[I 2021-05-07 16:30:54,255] Trial 4232 pruned.
[I 2021-05-07 16:30:54,436] Trial 4233 pruned.
[I 2021-05-07 16:30:55,064] Trial 4234 pruned.
[I 2021-05-07 16:30:55,678] Trial 4235 pruned.
[I 2021-05-07 16:30:56,294] Trial 4236 pruned.
[I 2021-05-07 16:30:56,913] Trial 4237 pruned.
[I 2021-05-07 16:30:57,530] Trial 4238 pruned.
[I 2021-05-07 16:30:58,141] Trial 4239 pruned.
[I 2021-05-07 16:30:59,270] Trial 4240 pruned.
[I 2021-05-07 16:31:00,389] Trial 4241 pruned.
[I 2021-05-07 16:31:01,004] Trial 4242 pruned.
[I 2021-05-07 16:31:01,606] Trial 4243 pruned.
[I 2021-05-07 16:31:02,218] Trial 4244 pruned.
[I 2021-05-07 16:31:03,343] Trial 4245 pruned.
[I 2021-05-07 16:31:04,470] Trial 4246 pruned.
[I 2021-05-07 16:31:05,588] Trial 4247 pruned.
[I 2021-05-07 16:31:06,204] Trial 4248 pruned.
[I 2021-05-07 16:31:06,822] Trial 4249 pruned.
[I 2021-05-07 16:31:07,439] Trial 4250 pruned.
[I 2021-05-07 16:31:08,057] Trial 4251 pruned.
[I 2021-05-07 16:31:08,671] Trial 4252 pruned.
[I 2021-05-07 16:31:09,792] Trial 4253 pruned.
[I 2021-05-07 16:31:10,399] Trial 4254 pruned.
[I 2021-05-07 16:31:11,010] Trial 4255 pruned.
[I 2021-05-07 16:31:11,625] Trial 4256 pruned.
[I 2021-05-07 16:31:12,235] Trial 4257 pruned.
[I 2021-05-07 16:31:12,854] Trial 4258 pruned.
[I 2021-05-07 16:31:13,033] Trial 4259 pruned.
[I 2021-05-07 16:31:13,640] Trial 4260 pruned.
[I 2021-05-07 16:31:14,255] Trial 4261 pruned.
[I 2021-05-07 16:31:14,870] Trial 4262 pruned.
[I 2021-05-07 16:31:15,479] Trial 4263 pruned.
[I 2021-05-07 16:31:16,096] Trial 4264 pruned.
[I 2021-05-07 16:31:16,713] Trial 4265 pruned.
[I 2021-05-07 16:31:17,332] Trial 4266 pruned.
[I 2021-05-07 16:31:18,944] Trial 4267 pruned.
[I 2021-05-07 16:31:19,562] Trial 4268 pruned.
[I 2021-05-07 16:31:20,164] Trial 4269 pruned.
[I 2021-05-07 16:31:20,779] Trial 4270 pruned.
[I 2021-05-07 16:31:21,396] Trial 4271 pruned.
[I 2021-05-07 16:31:22,012] Trial 4272 pruned.
[I 2021-05-07 16:31:23,139] Trial 4273 pruned.
[I 2021-05-07 16:31:23,738] Trial 4274 pruned.
[I 2021-05-07 16:31:24,349] Trial 4275 pruned.
[I 2021-05-07 16:31:25,458] Trial 4276 pruned.
[I 2021-05-07 16:32:16,483] Trial 4277 finished with value: 158.64430236816406 and parameters: {'lr': 0.0013893135307605506, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1006, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4136 with value: 156.2919464111328.
[I 2021-05-07 16:32:17,104] Trial 4278 pruned.
[I 2021-05-07 16:32:17,725] Trial 4279 pruned.
[I 2021-05-07 16:32:18,355] Trial 4280 pruned.
[I 2021-05-07 16:32:18,984] Trial 4281 pruned.
[I 2021-05-07 16:32:19,164] Trial 4282 pruned.
[I 2021-05-07 16:32:19,778] Trial 4283 pruned.
[I 2021-05-07 16:32:20,397] Trial 4284 pruned.
[I 2021-05-07 16:32:21,022] Trial 4285 pruned.
[I 2021-05-07 16:32:21,636] Trial 4286 pruned.
[I 2021-05-07 16:33:11,804] Trial 4287 finished with value: 161.83514404296875 and parameters: {'lr': 0.0011713498927191132, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 970, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4136 with value: 156.2919464111328.
[I 2021-05-07 16:33:12,935] Trial 4288 pruned.
[I 2021-05-07 16:33:17,619] Trial 4289 pruned.
[I 2021-05-07 16:33:18,242] Trial 4290 pruned.
[I 2021-05-07 16:33:18,856] Trial 4291 pruned.
[I 2021-05-07 16:33:19,986] Trial 4292 pruned.
[I 2021-05-07 16:33:20,623] Trial 4293 pruned.
[I 2021-05-07 16:33:21,235] Trial 4294 pruned.
[I 2021-05-07 16:33:22,839] Trial 4295 pruned.
[I 2021-05-07 16:33:23,953] Trial 4296 pruned.
[I 2021-05-07 16:33:24,575] Trial 4297 pruned.
[I 2021-05-07 16:33:25,169] Trial 4298 pruned.
[I 2021-05-07 16:33:25,780] Trial 4299 pruned.
[I 2021-05-07 16:33:26,392] Trial 4300 pruned.
[I 2021-05-07 16:33:27,003] Trial 4301 pruned.
[I 2021-05-07 16:33:27,619] Trial 4302 pruned.
[I 2021-05-07 16:33:28,234] Trial 4303 pruned.
[I 2021-05-07 16:33:28,855] Trial 4304 pruned.
[I 2021-05-07 16:33:29,037] Trial 4305 pruned.
[I 2021-05-07 16:33:29,654] Trial 4306 pruned.
[I 2021-05-07 16:33:30,263] Trial 4307 pruned.
[I 2021-05-07 16:33:30,880] Trial 4308 pruned.
[I 2021-05-07 16:34:21,336] Trial 4309 finished with value: 159.7611541748047 and parameters: {'lr': 0.0014049531755051742, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 984, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4136 with value: 156.2919464111328.
[I 2021-05-07 16:34:22,440] Trial 4310 pruned.
[I 2021-05-07 16:34:23,060] Trial 4311 pruned.
[I 2021-05-07 16:34:23,666] Trial 4312 pruned.
[I 2021-05-07 16:34:24,275] Trial 4313 pruned.
[I 2021-05-07 16:34:25,381] Trial 4314 pruned.
[I 2021-05-07 16:34:26,004] Trial 4315 pruned.
[I 2021-05-07 16:34:26,625] Trial 4316 pruned.
[I 2021-05-07 16:34:27,231] Trial 4317 pruned.
[I 2021-05-07 16:34:27,843] Trial 4318 pruned.
[I 2021-05-07 16:34:28,459] Trial 4319 pruned.
[I 2021-05-07 16:34:29,066] Trial 4320 pruned.
[I 2021-05-07 16:34:29,681] Trial 4321 pruned.
[I 2021-05-07 16:34:30,286] Trial 4322 pruned.
[I 2021-05-07 16:34:30,902] Trial 4323 pruned.
[I 2021-05-07 16:34:31,516] Trial 4324 pruned.
[I 2021-05-07 16:34:32,138] Trial 4325 pruned.
[I 2021-05-07 16:34:32,737] Trial 4326 pruned.
[I 2021-05-07 16:34:33,353] Trial 4327 pruned.
[I 2021-05-07 16:34:34,478] Trial 4328 pruned.
[I 2021-05-07 16:34:35,086] Trial 4329 pruned.
[I 2021-05-07 16:34:35,268] Trial 4330 pruned.
[I 2021-05-07 16:34:35,883] Trial 4331 pruned.
[I 2021-05-07 16:34:36,502] Trial 4332 pruned.
[I 2021-05-07 16:34:37,115] Trial 4333 pruned.
[I 2021-05-07 16:34:37,731] Trial 4334 pruned.
[I 2021-05-07 16:34:38,345] Trial 4335 pruned.
[I 2021-05-07 16:34:38,959] Trial 4336 pruned.
[I 2021-05-07 16:34:40,082] Trial 4337 pruned.
[I 2021-05-07 16:34:41,188] Trial 4338 pruned.
[I 2021-05-07 16:34:42,313] Trial 4339 pruned.
[I 2021-05-07 16:34:43,425] Trial 4340 pruned.
[I 2021-05-07 16:34:44,034] Trial 4341 pruned.
[I 2021-05-07 16:34:44,648] Trial 4342 pruned.
[I 2021-05-07 16:34:45,264] Trial 4343 pruned.
[I 2021-05-07 16:35:36,381] Trial 4344 finished with value: 160.900390625 and parameters: {'lr': 0.0010902322701836077, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1024, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4136 with value: 156.2919464111328.
[I 2021-05-07 16:35:37,003] Trial 4345 pruned.
[I 2021-05-07 16:35:37,619] Trial 4346 pruned.
[I 2021-05-07 16:35:38,571] Trial 4347 pruned.
[I 2021-05-07 16:35:39,184] Trial 4348 pruned.
[I 2021-05-07 16:35:39,800] Trial 4349 pruned.
[I 2021-05-07 16:35:40,412] Trial 4350 pruned.
[I 2021-05-07 16:35:41,023] Trial 4351 pruned.
[I 2021-05-07 16:35:42,147] Trial 4352 pruned.
[I 2021-05-07 16:35:42,756] Trial 4353 pruned.
[I 2021-05-07 16:35:43,370] Trial 4354 pruned.
[I 2021-05-07 16:35:44,472] Trial 4355 pruned.
[I 2021-05-07 16:35:45,086] Trial 4356 pruned.
[I 2021-05-07 16:35:45,268] Trial 4357 pruned.
[I 2021-05-07 16:35:46,397] Trial 4358 pruned.
[I 2021-05-07 16:35:47,010] Trial 4359 pruned.
[I 2021-05-07 16:35:47,618] Trial 4360 pruned.
[I 2021-05-07 16:36:38,120] Trial 4361 finished with value: 163.64585876464844 and parameters: {'lr': 0.0014262929310898118, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 984, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4136 with value: 156.2919464111328.
[I 2021-05-07 16:36:38,745] Trial 4362 pruned.
[I 2021-05-07 16:36:39,363] Trial 4363 pruned.
[I 2021-05-07 16:36:39,979] Trial 4364 pruned.
[I 2021-05-07 16:36:40,602] Trial 4365 pruned.
[I 2021-05-07 16:36:41,211] Trial 4366 pruned.
[I 2021-05-07 16:36:41,829] Trial 4367 pruned.
[I 2021-05-07 16:36:42,445] Trial 4368 pruned.
[I 2021-05-07 16:36:43,054] Trial 4369 pruned.
[I 2021-05-07 16:36:43,676] Trial 4370 pruned.
[I 2021-05-07 16:36:44,298] Trial 4371 pruned.
[I 2021-05-07 16:36:44,912] Trial 4372 pruned.
[I 2021-05-07 16:37:36,077] Trial 4373 finished with value: 156.0399932861328 and parameters: {'lr': 0.0013340136652422398, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1024, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 16:37:36,702] Trial 4374 pruned.
[I 2021-05-07 16:38:27,879] Trial 4375 finished with value: 159.09971618652344 and parameters: {'lr': 0.001432748571330546, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1024, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 16:38:28,502] Trial 4376 pruned.
[I 2021-05-07 16:38:29,122] Trial 4377 pruned.
[I 2021-05-07 16:38:30,256] Trial 4378 pruned.
[I 2021-05-07 16:38:30,872] Trial 4379 pruned.
[I 2021-05-07 16:38:31,996] Trial 4380 pruned.
[I 2021-05-07 16:38:32,180] Trial 4381 pruned.
[I 2021-05-07 16:38:32,799] Trial 4382 pruned.
[I 2021-05-07 16:38:33,417] Trial 4383 pruned.
[I 2021-05-07 16:38:34,548] Trial 4384 pruned.
[I 2021-05-07 16:38:35,172] Trial 4385 pruned.
[I 2021-05-07 16:38:35,793] Trial 4386 pruned.
[I 2021-05-07 16:38:36,414] Trial 4387 pruned.
[I 2021-05-07 16:38:37,032] Trial 4388 pruned.
[I 2021-05-07 16:39:28,017] Trial 4389 finished with value: 161.78057861328125 and parameters: {'lr': 0.0013060013987399753, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1006, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 16:39:28,640] Trial 4390 pruned.
[I 2021-05-07 16:39:29,771] Trial 4391 pruned.
[I 2021-05-07 16:39:30,898] Trial 4392 pruned.
[I 2021-05-07 16:40:21,962] Trial 4393 finished with value: 160.75759887695312 and parameters: {'lr': 0.0013023357096519958, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1006, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 16:40:22,585] Trial 4394 pruned.
[I 2021-05-07 16:40:23,203] Trial 4395 pruned.
[I 2021-05-07 16:40:23,822] Trial 4396 pruned.
[I 2021-05-07 16:40:24,443] Trial 4397 pruned.
[I 2021-05-07 16:40:25,070] Trial 4398 pruned.
[I 2021-05-07 16:40:26,199] Trial 4399 pruned.
[I 2021-05-07 16:40:26,816] Trial 4400 pruned.
[I 2021-05-07 16:41:17,827] Trial 4401 finished with value: 165.40277099609375 and parameters: {'lr': 0.0012595209368904845, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1006, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 16:41:18,444] Trial 4402 pruned.
[I 2021-05-07 16:42:09,647] Trial 4403 finished with value: 166.93771362304688 and parameters: {'lr': 0.0014776607901950936, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1024, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 16:42:10,776] Trial 4404 pruned.
[I 2021-05-07 16:42:11,394] Trial 4405 pruned.
[I 2021-05-07 16:42:11,578] Trial 4406 pruned.
[I 2021-05-07 16:42:12,194] Trial 4407 pruned.
[I 2021-05-07 16:42:13,307] Trial 4408 pruned.
[I 2021-05-07 16:42:13,931] Trial 4409 pruned.
[I 2021-05-07 16:42:14,542] Trial 4410 pruned.
[I 2021-05-07 16:42:15,670] Trial 4411 pruned.
[I 2021-05-07 16:42:16,284] Trial 4412 pruned.
[I 2021-05-07 16:42:16,903] Trial 4413 pruned.
[I 2021-05-07 16:42:17,526] Trial 4414 pruned.
[I 2021-05-07 16:42:18,150] Trial 4415 pruned.
[I 2021-05-07 16:43:09,353] Trial 4416 finished with value: 160.28860473632812 and parameters: {'lr': 0.0013846040229416004, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1024, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 16:43:09,976] Trial 4417 pruned.
[I 2021-05-07 16:43:10,597] Trial 4418 pruned.
[I 2021-05-07 16:43:11,725] Trial 4419 pruned.
[I 2021-05-07 16:43:12,859] Trial 4420 pruned.
[I 2021-05-07 16:43:14,485] Trial 4421 pruned.
[I 2021-05-07 16:43:15,108] Trial 4422 pruned.
[I 2021-05-07 16:44:06,280] Trial 4423 finished with value: 163.7920379638672 and parameters: {'lr': 0.0013535325823417971, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1024, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 16:44:07,413] Trial 4424 pruned.
[I 2021-05-07 16:44:08,035] Trial 4425 pruned.
[I 2021-05-07 16:44:09,149] Trial 4426 pruned.
[I 2021-05-07 16:44:09,332] Trial 4427 pruned.
[I 2021-05-07 16:44:10,461] Trial 4428 pruned.
[I 2021-05-07 16:44:11,085] Trial 4429 pruned.
[I 2021-05-07 16:45:02,104] Trial 4430 finished with value: 160.0913543701172 and parameters: {'lr': 0.001330092234380386, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1006, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 16:45:02,728] Trial 4431 pruned.
[I 2021-05-07 16:45:03,351] Trial 4432 pruned.
[I 2021-05-07 16:45:03,964] Trial 4433 pruned.
[I 2021-05-07 16:45:04,576] Trial 4434 pruned.
[I 2021-05-07 16:45:05,200] Trial 4435 pruned.
[I 2021-05-07 16:45:05,819] Trial 4436 pruned.
[I 2021-05-07 16:45:06,428] Trial 4437 pruned.
[I 2021-05-07 16:45:07,046] Trial 4438 pruned.
[I 2021-05-07 16:45:08,162] Trial 4439 pruned.
[I 2021-05-07 16:45:09,797] Trial 4440 pruned.
[I 2021-05-07 16:45:10,412] Trial 4441 pruned.
[I 2021-05-07 16:45:11,032] Trial 4442 pruned.
[I 2021-05-07 16:45:12,162] Trial 4443 pruned.
[I 2021-05-07 16:45:12,779] Trial 4444 pruned.
[I 2021-05-07 16:45:17,458] Trial 4445 pruned.
[I 2021-05-07 16:45:19,067] Trial 4446 pruned.
[I 2021-05-07 16:45:19,703] Trial 4447 pruned.
[I 2021-05-07 16:45:20,326] Trial 4448 pruned.
[I 2021-05-07 16:45:20,941] Trial 4449 pruned.
[I 2021-05-07 16:45:21,558] Trial 4450 pruned.
[I 2021-05-07 16:45:21,744] Trial 4451 pruned.
[I 2021-05-07 16:46:12,894] Trial 4452 finished with value: 158.2762908935547 and parameters: {'lr': 0.0013401066236534355, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1024, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 16:46:13,518] Trial 4453 pruned.
[I 2021-05-07 16:46:14,137] Trial 4454 pruned.
[I 2021-05-07 16:46:14,755] Trial 4455 pruned.
[I 2021-05-07 16:46:15,373] Trial 4456 pruned.
[I 2021-05-07 16:47:06,452] Trial 4457 finished with value: 163.611328125 and parameters: {'lr': 0.0013632994383090662, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1008, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 16:47:07,077] Trial 4458 pruned.
[I 2021-05-07 16:47:08,213] Trial 4459 pruned.
[I 2021-05-07 16:47:08,831] Trial 4460 pruned.
[I 2021-05-07 16:47:09,456] Trial 4461 pruned.
[I 2021-05-07 16:47:10,078] Trial 4462 pruned.
[I 2021-05-07 16:47:10,703] Trial 4463 pruned.
[I 2021-05-07 16:47:11,312] Trial 4464 pruned.
[I 2021-05-07 16:47:12,433] Trial 4465 pruned.
[I 2021-05-07 16:47:13,562] Trial 4466 pruned.
[I 2021-05-07 16:47:14,687] Trial 4467 pruned.
[I 2021-05-07 16:47:15,309] Trial 4468 pruned.
[I 2021-05-07 16:47:15,924] Trial 4469 pruned.
[I 2021-05-07 16:47:17,060] Trial 4470 pruned.
[I 2021-05-07 16:47:17,683] Trial 4471 pruned.
[I 2021-05-07 16:47:18,305] Trial 4472 pruned.
[I 2021-05-07 16:47:18,938] Trial 4473 pruned.
[I 2021-05-07 16:47:19,565] Trial 4474 pruned.
[I 2021-05-07 16:47:20,185] Trial 4475 pruned.
[I 2021-05-07 16:47:21,303] Trial 4476 pruned.
[I 2021-05-07 16:47:21,491] Trial 4477 pruned.
[I 2021-05-07 16:47:22,107] Trial 4478 pruned.
[I 2021-05-07 16:47:23,240] Trial 4479 pruned.
[I 2021-05-07 16:47:23,855] Trial 4480 pruned.
[I 2021-05-07 16:47:24,976] Trial 4481 pruned.
[I 2021-05-07 16:47:25,601] Trial 4482 pruned.
[I 2021-05-07 16:47:26,217] Trial 4483 pruned.
[I 2021-05-07 16:47:26,836] Trial 4484 pruned.
[I 2021-05-07 16:47:27,459] Trial 4485 pruned.
[I 2021-05-07 16:47:28,070] Trial 4486 pruned.
[I 2021-05-07 16:47:28,696] Trial 4487 pruned.
[I 2021-05-07 16:47:29,317] Trial 4488 pruned.
[I 2021-05-07 16:47:29,929] Trial 4489 pruned.
[I 2021-05-07 16:47:30,552] Trial 4490 pruned.
[I 2021-05-07 16:47:31,669] Trial 4491 pruned.
[I 2021-05-07 16:47:32,285] Trial 4492 pruned.
[I 2021-05-07 16:47:33,396] Trial 4493 pruned.
[I 2021-05-07 16:47:34,523] Trial 4494 pruned.
[I 2021-05-07 16:47:35,143] Trial 4495 pruned.
[I 2021-05-07 16:47:35,756] Trial 4496 pruned.
[I 2021-05-07 16:47:36,882] Trial 4497 pruned.
[I 2021-05-07 16:47:37,506] Trial 4498 pruned.
[I 2021-05-07 16:47:38,638] Trial 4499 pruned.
[I 2021-05-07 16:47:39,260] Trial 4500 pruned.
[I 2021-05-07 16:47:39,873] Trial 4501 pruned.
[I 2021-05-07 16:47:40,499] Trial 4502 pruned.
[I 2021-05-07 16:47:40,684] Trial 4503 pruned.
[I 2021-05-07 16:47:41,307] Trial 4504 pruned.
[I 2021-05-07 16:47:41,922] Trial 4505 pruned.
[I 2021-05-07 16:47:42,539] Trial 4506 pruned.
[I 2021-05-07 16:47:43,161] Trial 4507 pruned.
[I 2021-05-07 16:47:44,272] Trial 4508 pruned.
[I 2021-05-07 16:47:45,407] Trial 4509 pruned.
[I 2021-05-07 16:47:46,024] Trial 4510 pruned.
[I 2021-05-07 16:47:46,639] Trial 4511 pruned.
[I 2021-05-07 16:47:47,264] Trial 4512 pruned.
[I 2021-05-07 16:47:47,876] Trial 4513 pruned.
[I 2021-05-07 16:47:48,501] Trial 4514 pruned.
[I 2021-05-07 16:47:49,114] Trial 4515 pruned.
[I 2021-05-07 16:47:49,737] Trial 4516 pruned.
[I 2021-05-07 16:47:50,356] Trial 4517 pruned.
[I 2021-05-07 16:47:50,970] Trial 4518 pruned.
[I 2021-05-07 16:47:51,583] Trial 4519 pruned.
[I 2021-05-07 16:48:42,104] Trial 4520 finished with value: 166.6712188720703 and parameters: {'lr': 0.0014327098241309322, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 990, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 16:48:42,730] Trial 4521 pruned.
[I 2021-05-07 16:48:43,348] Trial 4522 pruned.
[I 2021-05-07 16:48:43,974] Trial 4523 pruned.
[I 2021-05-07 16:48:44,598] Trial 4524 pruned.
[I 2021-05-07 16:48:45,231] Trial 4525 pruned.
[I 2021-05-07 16:48:45,414] Trial 4526 pruned.
[I 2021-05-07 16:48:46,027] Trial 4527 pruned.
[I 2021-05-07 16:48:46,639] Trial 4528 pruned.
[I 2021-05-07 16:48:47,260] Trial 4529 pruned.
[I 2021-05-07 16:48:47,885] Trial 4530 pruned.
[I 2021-05-07 16:48:48,508] Trial 4531 pruned.
[I 2021-05-07 16:48:49,125] Trial 4532 pruned.
[I 2021-05-07 16:48:49,741] Trial 4533 pruned.
[I 2021-05-07 16:48:50,874] Trial 4534 pruned.
[I 2021-05-07 16:48:51,483] Trial 4535 pruned.
[I 2021-05-07 16:48:52,110] Trial 4536 pruned.
[I 2021-05-07 16:48:53,237] Trial 4537 pruned.
[I 2021-05-07 16:48:53,862] Trial 4538 pruned.
[I 2021-05-07 16:48:54,487] Trial 4539 pruned.
[I 2021-05-07 16:48:55,112] Trial 4540 pruned.
[I 2021-05-07 16:48:56,218] Trial 4541 pruned.
[I 2021-05-07 16:48:56,837] Trial 4542 pruned.
[I 2021-05-07 16:48:57,456] Trial 4543 pruned.
[I 2021-05-07 16:48:58,067] Trial 4544 pruned.
[I 2021-05-07 16:48:58,680] Trial 4545 pruned.
[I 2021-05-07 16:48:59,817] Trial 4546 pruned.
[I 2021-05-07 16:49:00,429] Trial 4547 pruned.
[I 2021-05-07 16:49:01,049] Trial 4548 pruned.
[I 2021-05-07 16:49:01,238] Trial 4549 pruned.
[I 2021-05-07 16:49:01,484] Trial 4550 pruned.
[I 2021-05-07 16:49:02,099] Trial 4551 pruned.
[I 2021-05-07 16:49:02,736] Trial 4552 pruned.
[I 2021-05-07 16:49:03,346] Trial 4553 pruned.
[I 2021-05-07 16:49:03,969] Trial 4554 pruned.
[I 2021-05-07 16:49:04,586] Trial 4555 pruned.
[I 2021-05-07 16:49:05,210] Trial 4556 pruned.
[I 2021-05-07 16:49:06,315] Trial 4557 pruned.
[I 2021-05-07 16:49:07,422] Trial 4558 pruned.
[I 2021-05-07 16:49:07,670] Trial 4559 pruned.
[I 2021-05-07 16:49:08,789] Trial 4560 pruned.
[I 2021-05-07 16:49:09,399] Trial 4561 pruned.
[I 2021-05-07 16:49:10,535] Trial 4562 pruned.
[I 2021-05-07 16:49:11,160] Trial 4563 pruned.
[I 2021-05-07 16:49:11,792] Trial 4564 pruned.
[I 2021-05-07 16:49:12,399] Trial 4565 pruned.
[I 2021-05-07 16:49:13,018] Trial 4566 pruned.
[I 2021-05-07 16:50:04,240] Trial 4567 finished with value: 158.07565307617188 and parameters: {'lr': 0.0013224901329295712, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1024, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 16:50:04,862] Trial 4568 pruned.
[I 2021-05-07 16:50:05,484] Trial 4569 pruned.
[I 2021-05-07 16:50:06,104] Trial 4570 pruned.
[I 2021-05-07 16:50:07,234] Trial 4571 pruned.
[I 2021-05-07 16:50:07,857] Trial 4572 pruned.
[I 2021-05-07 16:50:08,470] Trial 4573 pruned.
[I 2021-05-07 16:50:09,092] Trial 4574 pruned.
[I 2021-05-07 16:50:09,719] Trial 4575 pruned.
[I 2021-05-07 16:50:10,842] Trial 4576 pruned.
[I 2021-05-07 16:50:11,467] Trial 4577 pruned.
[I 2021-05-07 16:50:12,087] Trial 4578 pruned.
[I 2021-05-07 16:51:03,142] Trial 4579 finished with value: 170.0126190185547 and parameters: {'lr': 0.0012898211003749508, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1004, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 16:51:03,768] Trial 4580 pruned.
[I 2021-05-07 16:51:04,386] Trial 4581 pruned.
[I 2021-05-07 16:51:05,011] Trial 4582 pruned.
[I 2021-05-07 16:51:05,634] Trial 4583 pruned.
[I 2021-05-07 16:51:06,262] Trial 4584 pruned.
[I 2021-05-07 16:51:07,383] Trial 4585 pruned.
[I 2021-05-07 16:51:08,007] Trial 4586 pruned.
[I 2021-05-07 16:51:08,630] Trial 4587 pruned.
[I 2021-05-07 16:51:09,248] Trial 4588 pruned.
[I 2021-05-07 16:51:09,874] Trial 4589 pruned.
[I 2021-05-07 16:51:10,484] Trial 4590 pruned.
[I 2021-05-07 16:51:11,109] Trial 4591 pruned.
[I 2021-05-07 16:51:12,227] Trial 4592 pruned.
[I 2021-05-07 16:51:12,851] Trial 4593 pruned.
[I 2021-05-07 16:51:13,968] Trial 4594 pruned.
[I 2021-05-07 16:51:15,082] Trial 4595 pruned.
[I 2021-05-07 16:51:15,710] Trial 4596 pruned.
[I 2021-05-07 16:51:16,335] Trial 4597 pruned.
[I 2021-05-07 16:51:16,972] Trial 4598 pruned.
[I 2021-05-07 16:51:17,585] Trial 4599 pruned.
[I 2021-05-07 16:51:18,207] Trial 4600 pruned.
[I 2021-05-07 16:51:22,906] Trial 4601 pruned.
[I 2021-05-07 16:51:23,518] Trial 4602 pruned.
[I 2021-05-07 16:51:24,131] Trial 4603 pruned.
[I 2021-05-07 16:51:25,266] Trial 4604 pruned.
[I 2021-05-07 16:51:25,885] Trial 4605 pruned.
[I 2021-05-07 16:52:17,091] Trial 4606 finished with value: 157.54176330566406 and parameters: {'lr': 0.0013000536815031417, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1024, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 16:52:18,722] Trial 4607 pruned.
[I 2021-05-07 16:52:19,352] Trial 4608 pruned.
[I 2021-05-07 16:52:19,978] Trial 4609 pruned.
[I 2021-05-07 16:52:20,615] Trial 4610 pruned.
[I 2021-05-07 16:52:21,238] Trial 4611 pruned.
[I 2021-05-07 16:52:21,863] Trial 4612 pruned.
[I 2021-05-07 16:52:22,494] Trial 4613 pruned.
[I 2021-05-07 16:52:23,127] Trial 4614 pruned.
[I 2021-05-07 16:52:24,244] Trial 4615 pruned.
[I 2021-05-07 16:52:24,873] Trial 4616 pruned.
[I 2021-05-07 16:52:25,502] Trial 4617 pruned.
[I 2021-05-07 16:52:26,125] Trial 4618 pruned.
[I 2021-05-07 16:52:26,749] Trial 4619 pruned.
[I 2021-05-07 16:52:27,379] Trial 4620 pruned.
[I 2021-05-07 16:52:28,513] Trial 4621 pruned.
[I 2021-05-07 16:52:29,135] Trial 4622 pruned.
[I 2021-05-07 16:53:19,445] Trial 4623 finished with value: 156.59605407714844 and parameters: {'lr': 0.0010800537062392562, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1022, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 16:53:20,059] Trial 4624 pruned.
[I 2021-05-07 16:53:20,677] Trial 4625 pruned.
[I 2021-05-07 16:53:21,293] Trial 4626 pruned.
[I 2021-05-07 16:53:22,409] Trial 4627 pruned.
[I 2021-05-07 16:53:23,027] Trial 4628 pruned.
[I 2021-05-07 16:53:23,633] Trial 4629 pruned.
[I 2021-05-07 16:53:24,748] Trial 4630 pruned.
[I 2021-05-07 16:53:25,862] Trial 4631 pruned.
[I 2021-05-07 16:53:26,474] Trial 4632 pruned.
[I 2021-05-07 16:53:27,093] Trial 4633 pruned.
[I 2021-05-07 16:53:27,706] Trial 4634 pruned.
[I 2021-05-07 16:53:28,318] Trial 4635 pruned.
[I 2021-05-07 16:53:28,931] Trial 4636 pruned.
[I 2021-05-07 16:53:30,526] Trial 4637 pruned.
[I 2021-05-07 16:53:31,151] Trial 4638 pruned.
[I 2021-05-07 16:53:31,760] Trial 4639 pruned.
[I 2021-05-07 16:53:32,371] Trial 4640 pruned.
[I 2021-05-07 16:53:33,488] Trial 4641 pruned.
[I 2021-05-07 16:53:34,108] Trial 4642 pruned.
[I 2021-05-07 16:54:23,754] Trial 4643 finished with value: 157.12127685546875 and parameters: {'lr': 0.0010176588040630963, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 988, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 16:54:24,007] Trial 4644 pruned.
[I 2021-05-07 16:54:24,616] Trial 4645 pruned.
[I 2021-05-07 16:54:25,237] Trial 4646 pruned.
[I 2021-05-07 16:54:25,846] Trial 4647 pruned.
[I 2021-05-07 16:54:26,959] Trial 4648 pruned.
[I 2021-05-07 16:54:27,566] Trial 4649 pruned.
[I 2021-05-07 16:54:28,178] Trial 4650 pruned.
[I 2021-05-07 16:54:28,788] Trial 4651 pruned.
[I 2021-05-07 16:54:29,406] Trial 4652 pruned.
[I 2021-05-07 16:54:29,654] Trial 4653 pruned.
[I 2021-05-07 16:54:30,770] Trial 4654 pruned.
[I 2021-05-07 16:54:30,958] Trial 4655 pruned.
[I 2021-05-07 16:54:32,075] Trial 4656 pruned.
[I 2021-05-07 16:54:32,683] Trial 4657 pruned.
[I 2021-05-07 16:54:33,302] Trial 4658 pruned.
[I 2021-05-07 16:54:33,919] Trial 4659 pruned.
[I 2021-05-07 16:54:34,549] Trial 4660 pruned.
[I 2021-05-07 16:54:35,166] Trial 4661 pruned.
[I 2021-05-07 16:54:36,259] Trial 4662 pruned.
[I 2021-05-07 16:54:36,510] Trial 4663 pruned.
[I 2021-05-07 16:54:37,125] Trial 4664 pruned.
[I 2021-05-07 16:54:37,737] Trial 4665 pruned.
[I 2021-05-07 16:54:38,356] Trial 4666 pruned.
[I 2021-05-07 16:54:38,971] Trial 4667 pruned.
[I 2021-05-07 16:54:39,573] Trial 4668 pruned.
[I 2021-05-07 16:54:40,182] Trial 4669 pruned.
[I 2021-05-07 16:54:40,803] Trial 4670 pruned.
[I 2021-05-07 16:54:41,421] Trial 4671 pruned.
[I 2021-05-07 16:54:41,668] Trial 4672 pruned.
[I 2021-05-07 16:54:44,290] Trial 4673 pruned.
[I 2021-05-07 16:54:44,903] Trial 4674 pruned.
[I 2021-05-07 16:54:45,997] Trial 4675 pruned.
[I 2021-05-07 16:54:46,610] Trial 4676 pruned.
[I 2021-05-07 16:54:47,219] Trial 4677 pruned.
[I 2021-05-07 16:54:47,834] Trial 4678 pruned.
[I 2021-05-07 16:54:48,447] Trial 4679 pruned.
[I 2021-05-07 16:55:38,717] Trial 4680 finished with value: 158.68722534179688 and parameters: {'lr': 0.0011452838389024102, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1024, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 16:55:38,972] Trial 4681 pruned.
[I 2021-05-07 16:55:39,158] Trial 4682 pruned.
[I 2021-05-07 16:55:40,274] Trial 4683 pruned.
[I 2021-05-07 16:55:40,895] Trial 4684 pruned.
[I 2021-05-07 16:55:42,010] Trial 4685 pruned.
[I 2021-05-07 16:55:42,627] Trial 4686 pruned.
[I 2021-05-07 16:55:43,243] Trial 4687 pruned.
[I 2021-05-07 16:55:43,864] Trial 4688 pruned.
[I 2021-05-07 16:55:44,481] Trial 4689 pruned.
[I 2021-05-07 16:55:46,089] Trial 4690 pruned.
[I 2021-05-07 16:55:46,337] Trial 4691 pruned.
[I 2021-05-07 16:55:46,954] Trial 4692 pruned.
[I 2021-05-07 16:55:47,571] Trial 4693 pruned.
[I 2021-05-07 16:55:48,186] Trial 4694 pruned.
[I 2021-05-07 16:55:48,800] Trial 4695 pruned.
[I 2021-05-07 16:55:49,416] Trial 4696 pruned.
[I 2021-05-07 16:55:50,026] Trial 4697 pruned.
[I 2021-05-07 16:55:50,641] Trial 4698 pruned.
[I 2021-05-07 16:55:51,256] Trial 4699 pruned.
[I 2021-05-07 16:55:51,504] Trial 4700 pruned.
[I 2021-05-07 16:55:52,115] Trial 4701 pruned.
[I 2021-05-07 16:55:52,730] Trial 4702 pruned.
[I 2021-05-07 16:55:53,340] Trial 4703 pruned.
[I 2021-05-07 16:55:53,954] Trial 4704 pruned.
[I 2021-05-07 16:55:54,570] Trial 4705 pruned.
[I 2021-05-07 16:55:55,688] Trial 4706 pruned.
[I 2021-05-07 16:55:56,299] Trial 4707 pruned.
[I 2021-05-07 16:55:57,419] Trial 4708 pruned.
[I 2021-05-07 16:55:57,608] Trial 4709 pruned.
[I 2021-05-07 16:55:58,221] Trial 4710 pruned.
[I 2021-05-07 16:55:58,471] Trial 4711 pruned.
[I 2021-05-07 16:55:59,086] Trial 4712 pruned.
[I 2021-05-07 16:56:00,207] Trial 4713 pruned.
[I 2021-05-07 16:56:00,826] Trial 4714 pruned.
[I 2021-05-07 16:56:01,438] Trial 4715 pruned.
[I 2021-05-07 16:56:02,552] Trial 4716 pruned.
[I 2021-05-07 16:56:03,167] Trial 4717 pruned.
[I 2021-05-07 16:56:03,782] Trial 4718 pruned.
[I 2021-05-07 16:56:04,033] Trial 4719 pruned.
[I 2021-05-07 16:56:04,649] Trial 4720 pruned.
[I 2021-05-07 16:56:07,770] Trial 4721 pruned.
[I 2021-05-07 16:56:11,383] Trial 4722 pruned.
[I 2021-05-07 16:56:12,505] Trial 4723 pruned.
[I 2021-05-07 16:56:13,123] Trial 4724 pruned.
[I 2021-05-07 16:56:13,751] Trial 4725 pruned.
[I 2021-05-07 16:56:14,367] Trial 4726 pruned.
[I 2021-05-07 16:56:14,992] Trial 4727 pruned.
[I 2021-05-07 16:57:05,892] Trial 4728 finished with value: 159.21580505371094 and parameters: {'lr': 0.0012511024601902976, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1002, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 16:57:06,149] Trial 4729 pruned.
[I 2021-05-07 16:57:06,767] Trial 4730 pruned.
[I 2021-05-07 16:57:07,391] Trial 4731 pruned.
[I 2021-05-07 16:57:08,021] Trial 4732 pruned.
[I 2021-05-07 16:57:08,651] Trial 4733 pruned.
[I 2021-05-07 16:57:09,267] Trial 4734 pruned.
[I 2021-05-07 16:57:09,894] Trial 4735 pruned.
[I 2021-05-07 16:57:10,511] Trial 4736 pruned.
[I 2021-05-07 16:57:10,762] Trial 4737 pruned.
[I 2021-05-07 16:57:10,966] Trial 4738 pruned.
[I 2021-05-07 16:57:11,584] Trial 4739 pruned.
[I 2021-05-07 16:57:12,211] Trial 4740 pruned.
[I 2021-05-07 16:57:12,838] Trial 4741 pruned.
[I 2021-05-07 16:57:13,459] Trial 4742 pruned.
[I 2021-05-07 16:57:14,076] Trial 4743 pruned.
[I 2021-05-07 16:57:15,214] Trial 4744 pruned.
[I 2021-05-07 16:57:15,845] Trial 4745 pruned.
[I 2021-05-07 16:57:16,980] Trial 4746 pruned.
[I 2021-05-07 16:57:17,230] Trial 4747 pruned.
[I 2021-05-07 16:57:17,851] Trial 4748 pruned.
[I 2021-05-07 16:57:18,986] Trial 4749 pruned.
[I 2021-05-07 16:57:19,616] Trial 4750 pruned.
[I 2021-05-07 16:57:20,231] Trial 4751 pruned.
[I 2021-05-07 16:57:20,859] Trial 4752 pruned.
[I 2021-05-07 16:57:21,995] Trial 4753 pruned.
[I 2021-05-07 16:57:22,622] Trial 4754 pruned.
[I 2021-05-07 16:57:23,237] Trial 4755 pruned.
[I 2021-05-07 16:57:23,488] Trial 4756 pruned.
[I 2021-05-07 16:57:24,113] Trial 4757 pruned.
[I 2021-05-07 16:57:24,729] Trial 4758 pruned.
[I 2021-05-07 16:57:25,348] Trial 4759 pruned.
[I 2021-05-07 16:58:15,754] Trial 4760 finished with value: 160.56434631347656 and parameters: {'lr': 0.0011691439672196807, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 984, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 16:58:16,908] Trial 4761 pruned.
[I 2021-05-07 16:58:18,034] Trial 4762 pruned.
[I 2021-05-07 16:58:18,647] Trial 4763 pruned.
[I 2021-05-07 16:58:19,283] Trial 4764 pruned.
[I 2021-05-07 16:58:19,475] Trial 4765 pruned.
[I 2021-05-07 16:58:19,726] Trial 4766 pruned.
[I 2021-05-07 16:59:10,023] Trial 4767 finished with value: 165.4767303466797 and parameters: {'lr': 0.0011680390160614907, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1024, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 16:59:10,654] Trial 4768 pruned.
[I 2021-05-07 16:59:11,280] Trial 4769 pruned.
[I 2021-05-07 16:59:11,903] Trial 4770 pruned.
[I 2021-05-07 16:59:12,524] Trial 4771 pruned.
[I 2021-05-07 16:59:13,152] Trial 4772 pruned.
[I 2021-05-07 16:59:14,266] Trial 4773 pruned.
[I 2021-05-07 16:59:14,895] Trial 4774 pruned.
[I 2021-05-07 16:59:15,146] Trial 4775 pruned.
[I 2021-05-07 17:00:05,652] Trial 4776 finished with value: 166.4490509033203 and parameters: {'lr': 0.0010002172310781463, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 986, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 17:00:06,285] Trial 4777 pruned.
[I 2021-05-07 17:00:06,904] Trial 4778 pruned.
[I 2021-05-07 17:00:08,030] Trial 4779 pruned.
[I 2021-05-07 17:00:08,652] Trial 4780 pruned.
[I 2021-05-07 17:00:59,298] Trial 4781 finished with value: 167.03526306152344 and parameters: {'lr': 0.00107976793818591, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 988, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 17:00:59,928] Trial 4782 pruned.
[I 2021-05-07 17:01:01,053] Trial 4783 pruned.
[I 2021-05-07 17:01:01,309] Trial 4784 pruned.
[I 2021-05-07 17:01:01,940] Trial 4785 pruned.
[I 2021-05-07 17:01:02,572] Trial 4786 pruned.
[I 2021-05-07 17:01:03,185] Trial 4787 pruned.
[I 2021-05-07 17:01:03,814] Trial 4788 pruned.
[I 2021-05-07 17:01:04,443] Trial 4789 pruned.
[I 2021-05-07 17:01:05,059] Trial 4790 pruned.
[I 2021-05-07 17:01:05,673] Trial 4791 pruned.
[I 2021-05-07 17:01:06,297] Trial 4792 pruned.
[I 2021-05-07 17:01:06,561] Trial 4793 pruned.
[I 2021-05-07 17:01:06,754] Trial 4794 pruned.
[I 2021-05-07 17:01:07,370] Trial 4795 pruned.
[I 2021-05-07 17:01:07,984] Trial 4796 pruned.
[I 2021-05-07 17:01:08,616] Trial 4797 pruned.
[I 2021-05-07 17:01:09,246] Trial 4798 pruned.
[I 2021-05-07 17:01:10,356] Trial 4799 pruned.
[I 2021-05-07 17:01:11,466] Trial 4800 pruned.
[I 2021-05-07 17:01:12,601] Trial 4801 pruned.
[I 2021-05-07 17:01:13,229] Trial 4802 pruned.
[I 2021-05-07 17:01:13,484] Trial 4803 pruned.
[I 2021-05-07 17:01:14,107] Trial 4804 pruned.
[I 2021-05-07 17:01:14,736] Trial 4805 pruned.
[I 2021-05-07 17:01:15,355] Trial 4806 pruned.
[I 2021-05-07 17:01:16,455] Trial 4807 pruned.
[I 2021-05-07 17:01:17,082] Trial 4808 pruned.
[I 2021-05-07 17:01:17,714] Trial 4809 pruned.
[I 2021-05-07 17:01:18,336] Trial 4810 pruned.
[I 2021-05-07 17:01:19,942] Trial 4811 pruned.
[I 2021-05-07 17:01:20,199] Trial 4812 pruned.
[I 2021-05-07 17:01:20,831] Trial 4813 pruned.
[I 2021-05-07 17:01:21,465] Trial 4814 pruned.
[I 2021-05-07 17:02:10,755] Trial 4815 finished with value: 160.95909118652344 and parameters: {'lr': 0.0014041914460395492, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 974, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'linear'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 17:02:11,385] Trial 4816 pruned.
[I 2021-05-07 17:02:12,493] Trial 4817 pruned.
[I 2021-05-07 17:02:13,110] Trial 4818 pruned.
[I 2021-05-07 17:02:13,725] Trial 4819 pruned.
[I 2021-05-07 17:02:14,351] Trial 4820 pruned.
[I 2021-05-07 17:02:14,609] Trial 4821 pruned.
[I 2021-05-07 17:02:15,730] Trial 4822 pruned.
[I 2021-05-07 17:02:15,924] Trial 4823 pruned.
[I 2021-05-07 17:02:16,545] Trial 4824 pruned.
[I 2021-05-07 17:02:17,180] Trial 4825 pruned.
[I 2021-05-07 17:02:18,316] Trial 4826 pruned.
[I 2021-05-07 17:02:18,955] Trial 4827 pruned.
[I 2021-05-07 17:02:19,578] Trial 4828 pruned.
[I 2021-05-07 17:02:22,754] Trial 4829 pruned.
[I 2021-05-07 17:02:23,387] Trial 4830 pruned.
[I 2021-05-07 17:02:23,657] Trial 4831 pruned.
[I 2021-05-07 17:02:24,771] Trial 4832 pruned.
[I 2021-05-07 17:02:25,401] Trial 4833 pruned.
[I 2021-05-07 17:02:26,032] Trial 4834 pruned.
[I 2021-05-07 17:02:26,641] Trial 4835 pruned.
[I 2021-05-07 17:02:27,270] Trial 4836 pruned.
[I 2021-05-07 17:02:27,895] Trial 4837 pruned.
[I 2021-05-07 17:02:28,512] Trial 4838 pruned.
[I 2021-05-07 17:02:29,126] Trial 4839 pruned.
[I 2021-05-07 17:02:29,383] Trial 4840 pruned.
[I 2021-05-07 17:02:30,518] Trial 4841 pruned.
[I 2021-05-07 17:02:31,148] Trial 4842 pruned.
[I 2021-05-07 17:02:31,772] Trial 4843 pruned.
[I 2021-05-07 17:02:32,389] Trial 4844 pruned.
[I 2021-05-07 17:03:23,514] Trial 4845 finished with value: 156.59718322753906 and parameters: {'lr': 0.0010067211446694736, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1024, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 17:03:24,135] Trial 4846 pruned.
[I 2021-05-07 17:03:24,767] Trial 4847 pruned.
[I 2021-05-07 17:03:25,398] Trial 4848 pruned.
[I 2021-05-07 17:03:25,655] Trial 4849 pruned.
[I 2021-05-07 17:03:26,790] Trial 4850 pruned.
[I 2021-05-07 17:03:26,983] Trial 4851 pruned.
[I 2021-05-07 17:03:27,613] Trial 4852 pruned.
[I 2021-05-07 17:03:28,239] Trial 4853 pruned.
[I 2021-05-07 17:03:29,374] Trial 4854 pruned.
[I 2021-05-07 17:03:30,011] Trial 4855 pruned.
[I 2021-05-07 17:03:30,637] Trial 4856 pruned.
[I 2021-05-07 17:03:31,262] Trial 4857 pruned.
[I 2021-05-07 17:03:31,895] Trial 4858 pruned.
[I 2021-05-07 17:03:32,153] Trial 4859 pruned.
[I 2021-05-07 17:03:32,787] Trial 4860 pruned.
[I 2021-05-07 17:03:33,413] Trial 4861 pruned.
[I 2021-05-07 17:03:34,063] Trial 4862 pruned.
[I 2021-05-07 17:03:36,723] Trial 4863 pruned.
[I 2021-05-07 17:03:37,356] Trial 4864 pruned.
[I 2021-05-07 17:03:38,483] Trial 4865 pruned.
[I 2021-05-07 17:03:39,622] Trial 4866 pruned.
[I 2021-05-07 17:03:40,765] Trial 4867 pruned.
[I 2021-05-07 17:03:41,020] Trial 4868 pruned.
[I 2021-05-07 17:03:42,170] Trial 4869 pruned.
[I 2021-05-07 17:03:42,796] Trial 4870 pruned.
[I 2021-05-07 17:03:43,425] Trial 4871 pruned.
[I 2021-05-07 17:03:44,042] Trial 4872 pruned.
[I 2021-05-07 17:03:44,673] Trial 4873 pruned.
[I 2021-05-07 17:03:45,307] Trial 4874 pruned.
[I 2021-05-07 17:03:45,925] Trial 4875 pruned.
[I 2021-05-07 17:04:36,808] Trial 4876 finished with value: 158.08372497558594 and parameters: {'lr': 0.0011576668771137372, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1002, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 17:04:37,928] Trial 4877 pruned.
[I 2021-05-07 17:04:38,185] Trial 4878 pruned.
[I 2021-05-07 17:04:38,377] Trial 4879 pruned.
[I 2021-05-07 17:04:39,005] Trial 4880 pruned.
[I 2021-05-07 17:04:39,634] Trial 4881 pruned.
[I 2021-05-07 17:04:40,755] Trial 4882 pruned.
[I 2021-05-07 17:04:41,383] Trial 4883 pruned.
[I 2021-05-07 17:04:42,007] Trial 4884 pruned.
[I 2021-05-07 17:04:42,638] Trial 4885 pruned.
[I 2021-05-07 17:04:43,271] Trial 4886 pruned.
[I 2021-05-07 17:04:43,903] Trial 4887 pruned.
[I 2021-05-07 17:04:44,154] Trial 4888 pruned.
[I 2021-05-07 17:04:44,786] Trial 4889 pruned.
[I 2021-05-07 17:04:45,419] Trial 4890 pruned.
[I 2021-05-07 17:04:46,051] Trial 4891 pruned.
[I 2021-05-07 17:04:46,673] Trial 4892 pruned.
[I 2021-05-07 17:04:47,305] Trial 4893 pruned.
[I 2021-05-07 17:04:47,933] Trial 4894 pruned.
[I 2021-05-07 17:04:48,562] Trial 4895 pruned.
[I 2021-05-07 17:05:39,003] Trial 4896 finished with value: 161.62307739257812 and parameters: {'lr': 0.0012843639468935982, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 990, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4373 with value: 156.0399932861328.
[I 2021-05-07 17:05:39,260] Trial 4897 pruned.
[I 2021-05-07 17:05:39,877] Trial 4898 pruned.
[I 2021-05-07 17:05:40,496] Trial 4899 pruned.
[I 2021-05-07 17:05:41,122] Trial 4900 pruned.
[I 2021-05-07 17:05:42,262] Trial 4901 pruned.
[I 2021-05-07 17:05:42,893] Trial 4902 pruned.
[I 2021-05-07 17:05:43,516] Trial 4903 pruned.
[I 2021-05-07 17:05:44,144] Trial 4904 pruned.
[I 2021-05-07 17:05:44,775] Trial 4905 pruned.
[I 2021-05-07 17:05:45,032] Trial 4906 pruned.
[I 2021-05-07 17:05:45,225] Trial 4907 pruned.
[I 2021-05-07 17:05:45,856] Trial 4908 pruned.
[I 2021-05-07 17:05:46,487] Trial 4909 pruned.
[I 2021-05-07 17:05:47,107] Trial 4910 pruned.
[I 2021-05-07 17:05:47,741] Trial 4911 pruned.
[I 2021-05-07 17:05:48,367] Trial 4912 pruned.
[I 2021-05-07 17:05:48,997] Trial 4913 pruned.
[I 2021-05-07 17:05:49,616] Trial 4914 pruned.
[I 2021-05-07 17:05:50,244] Trial 4915 pruned.
[I 2021-05-07 17:05:50,502] Trial 4916 pruned.
[I 2021-05-07 17:05:51,124] Trial 4917 pruned.
[I 2021-05-07 17:05:51,756] Trial 4918 pruned.
[I 2021-05-07 17:05:52,896] Trial 4919 pruned.
[I 2021-05-07 17:05:54,035] Trial 4920 pruned.
[I 2021-05-07 17:05:54,669] Trial 4921 pruned.
[I 2021-05-07 17:05:55,282] Trial 4922 pruned.
[I 2021-05-07 17:05:55,920] Trial 4923 pruned.
[I 2021-05-07 17:05:56,546] Trial 4924 pruned.
[I 2021-05-07 17:05:56,803] Trial 4925 pruned.
[I 2021-05-07 17:05:57,430] Trial 4926 pruned.
[I 2021-05-07 17:05:58,561] Trial 4927 pruned.
[I 2021-05-07 17:05:59,191] Trial 4928 pruned.
[I 2021-05-07 17:05:59,818] Trial 4929 pruned.
[I 2021-05-07 17:06:00,451] Trial 4930 pruned.
[I 2021-05-07 17:06:01,585] Trial 4931 pruned.
[I 2021-05-07 17:06:02,207] Trial 4932 pruned.
[I 2021-05-07 17:06:02,842] Trial 4933 pruned.
[I 2021-05-07 17:06:03,098] Trial 4934 pruned.
[I 2021-05-07 17:06:03,292] Trial 4935 pruned.
[I 2021-05-07 17:06:03,923] Trial 4936 pruned.
[I 2021-05-07 17:06:04,555] Trial 4937 pruned.
[I 2021-05-07 17:06:05,685] Trial 4938 pruned.
[I 2021-05-07 17:06:06,304] Trial 4939 pruned.
[I 2021-05-07 17:06:06,927] Trial 4940 pruned.
[I 2021-05-07 17:06:07,559] Trial 4941 pruned.
[I 2021-05-07 17:06:08,185] Trial 4942 pruned.
[I 2021-05-07 17:06:09,321] Trial 4943 pruned.
[I 2021-05-07 17:06:09,580] Trial 4944 pruned.
[I 2021-05-07 17:06:10,202] Trial 4945 pruned.
[I 2021-05-07 17:06:10,837] Trial 4946 pruned.
[I 2021-05-07 17:06:11,462] Trial 4947 pruned.
[I 2021-05-07 17:06:12,095] Trial 4948 pruned.
[I 2021-05-07 17:06:12,732] Trial 4949 pruned.
[I 2021-05-07 17:06:13,360] Trial 4950 pruned.
[I 2021-05-07 17:06:13,984] Trial 4951 pruned.
[I 2021-05-07 17:06:15,117] Trial 4952 pruned.
[I 2021-05-07 17:06:15,373] Trial 4953 pruned.
[I 2021-05-07 17:06:16,003] Trial 4954 pruned.
[I 2021-05-07 17:06:16,629] Trial 4955 pruned.
[I 2021-05-07 17:06:17,260] Trial 4956 pruned.
[I 2021-05-07 17:06:17,900] Trial 4957 pruned.
[I 2021-05-07 17:06:18,525] Trial 4958 pruned.
[I 2021-05-07 17:06:19,162] Trial 4959 pruned.
[I 2021-05-07 17:06:19,789] Trial 4960 pruned.
[I 2021-05-07 17:06:20,420] Trial 4961 pruned.
[I 2021-05-07 17:06:20,679] Trial 4962 pruned.
[I 2021-05-07 17:06:21,301] Trial 4963 pruned.
[I 2021-05-07 17:06:21,497] Trial 4964 pruned.
[I 2021-05-07 17:06:22,131] Trial 4965 pruned.
[I 2021-05-07 17:06:22,766] Trial 4966 pruned.
[I 2021-05-07 17:06:23,403] Trial 4967 pruned.
[I 2021-05-07 17:06:24,038] Trial 4968 pruned.
[I 2021-05-07 17:06:24,674] Trial 4969 pruned.
[I 2021-05-07 17:06:25,306] Trial 4970 pruned.
[I 2021-05-07 17:06:25,933] Trial 4971 pruned.
[I 2021-05-07 17:06:26,191] Trial 4972 pruned.
[I 2021-05-07 17:06:26,813] Trial 4973 pruned.
[I 2021-05-07 17:06:27,444] Trial 4974 pruned.
[I 2021-05-07 17:06:28,080] Trial 4975 pruned.
[I 2021-05-07 17:06:28,715] Trial 4976 pruned.
[I 2021-05-07 17:06:30,362] Trial 4977 pruned.
[I 2021-05-07 17:06:30,985] Trial 4978 pruned.
[I 2021-05-07 17:06:31,622] Trial 4979 pruned.
[I 2021-05-07 17:06:32,763] Trial 4980 pruned.
[I 2021-05-07 17:06:33,391] Trial 4981 pruned.
[I 2021-05-07 17:06:33,647] Trial 4982 pruned.
[I 2021-05-07 17:06:34,281] Trial 4983 pruned.
[I 2021-05-07 17:06:34,902] Trial 4984 pruned.
[I 2021-05-07 17:06:35,526] Trial 4985 pruned.
[I 2021-05-07 17:06:36,160] Trial 4986 pruned.
[I 2021-05-07 17:07:27,344] Trial 4987 finished with value: 154.99546813964844 and parameters: {'lr': 0.0010009502114520618, 'batch_size': 16, 'n_layers': 3, 'neurons_HL1': 1024, 'HL0_ac_fn': 'relu', 'HL1_ac_fn': 'relu', 'HL2_ac_fn': 'relu'}. Best is trial 4987 with value: 154.99546813964844.
[I 2021-05-07 17:07:27,981] Trial 4988 pruned.
[I 2021-05-07 17:07:28,616] Trial 4989 pruned.
[I 2021-05-07 17:07:29,778] Trial 4990 pruned.
[I 2021-05-07 17:07:30,414] Trial 4991 pruned.
[I 2021-05-07 17:07:31,047] Trial 4992 pruned.
[I 2021-05-07 17:07:31,676] Trial 4993 pruned.
[I 2021-05-07 17:07:32,818] Trial 4994 pruned.
[I 2021-05-07 17:07:33,453] Trial 4995 pruned.
[I 2021-05-07 17:07:34,080] Trial 4996 pruned.
[I 2021-05-07 17:07:34,719] Trial 4997 pruned.
[I 2021-05-07 17:07:35,354] Trial 4998 pruned.
[I 2021-05-07 17:07:35,991] Trial 4999 pruned.
Wall time: 1h 58min 9s
# Export the full search history (one row per trial, PRUNED and COMPLETE)
# as a DataFrame so it can be pickled and inspected later.
pca_trials_df = pca_study.trials_dataframe()
pca_trials_df
| number | value | datetime_start | datetime_complete | duration | params_HL0_ac_fn | params_HL1_ac_fn | params_HL2_ac_fn | params_HL3_ac_fn | params_HL4_ac_fn | params_batch_size | params_lr | params_n_layers | params_neurons_HL1 | state | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0 | 168.740234 | 2021-05-07 15:09:26.415515 | 2021-05-07 15:10:03.634030 | 0 days 00:00:37.218515 | relu | relu | NaN | NaN | NaN | 16 | 0.005612 | 2 | 60 | COMPLETE |
| 1 | 1 | 175.975861 | 2021-05-07 15:10:03.636025 | 2021-05-07 15:10:49.637065 | 0 days 00:00:46.001040 | relu | relu | linear | NaN | NaN | 16 | 0.087060 | 3 | 538 | COMPLETE |
| 2 | 2 | 166.189377 | 2021-05-07 15:10:49.639060 | 2021-05-07 15:11:26.524465 | 0 days 00:00:36.885405 | relu | linear | NaN | NaN | NaN | 16 | 0.008168 | 2 | 624 | COMPLETE |
| 3 | 3 | 248.698746 | 2021-05-07 15:11:26.525462 | 2021-05-07 15:11:35.563304 | 0 days 00:00:09.037842 | linear | linear | NaN | NaN | NaN | 64 | 0.041380 | 2 | 508 | COMPLETE |
| 4 | 4 | 174.704483 | 2021-05-07 15:11:35.564302 | 2021-05-07 15:11:43.908997 | 0 days 00:00:08.344695 | relu | relu | relu | linear | linear | 128 | 0.004202 | 5 | 964 | COMPLETE |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 4995 | 4995 | 212.278336 | 2021-05-07 17:07:32.820118 | 2021-05-07 17:07:33.453425 | 0 days 00:00:00.633307 | relu | relu | relu | NaN | NaN | 16 | 0.036656 | 3 | 1024 | PRUNED |
| 4996 | 4996 | 193.166946 | 2021-05-07 17:07:33.455420 | 2021-05-07 17:07:34.080748 | 0 days 00:00:00.625328 | relu | relu | relu | NaN | NaN | 16 | 0.001010 | 3 | 990 | PRUNED |
| 4997 | 4997 | 205.143890 | 2021-05-07 17:07:34.082743 | 2021-05-07 17:07:34.718045 | 0 days 00:00:00.635302 | relu | relu | relu | NaN | NaN | 16 | 0.001169 | 3 | 1006 | PRUNED |
| 4998 | 4998 | 247.525085 | 2021-05-07 17:07:34.721037 | 2021-05-07 17:07:35.354344 | 0 days 00:00:00.633307 | relu | relu | relu | NaN | NaN | 16 | 0.058445 | 3 | 1024 | PRUNED |
| 4999 | 4999 | 196.997955 | 2021-05-07 17:07:35.356339 | 2021-05-07 17:07:35.991640 | 0 days 00:00:00.635301 | relu | relu | relu | NaN | NaN | 16 | 0.001087 | 3 | 1024 | PRUNED |
5000 rows × 15 columns
# Persist both the trials DataFrame and the study object itself so the
# search can be inspected or resumed later.
results_dir = "dnn_trials/returning_test_loss/pca_25/"
pca_trials_df.to_pickle(f"{results_dir}{pca_study_name}_df.pkl")
joblib.dump(pca_study, f"{results_dir}{pca_study_name}_study.pkl")
['dnn_trials/returning_test_loss/pca_25/PCA_25_all_hp_select_1st_layer_only_TPE_sampler_study.pkl']
# Visualize the search: parallel-coordinate view (also saved to JPEG),
# then the optimization history and per-trial intermediate values.
coord_fig = optuna.visualization.plot_parallel_coordinate(pca_study)
coord_fig.write_image(f"dnn_trials/returning_test_loss/pca_25/{pca_study_name}_plot.jpeg")
coord_fig.show()
optuna.visualization.plot_optimization_history(pca_study)
optuna.visualization.plot_intermediate_values(pca_study)
# Reload the persisted PCA study from disk and report its best trial.
pca_study = joblib.load("dnn_trials/pca_25/PCA_25_all_hp_select_1st_layer_only_TPE_sampler_study.pkl")
best_trial = pca_study.best_trial
print("Best trial until now:")
print(" Value: ", best_trial.value)
print(" Params: ")
for param_name, param_value in best_trial.params.items():
    print(f"    {param_name}: {param_value}")
Best trial until now:
Value: 154.99546813964844
Params:
lr: 0.0010009502114520618
batch_size: 16
n_layers: 3
neurons_HL1: 1024
HL0_ac_fn: relu
HL1_ac_fn: relu
HL2_ac_fn: relu
%%time
# Now we investigate the optimized model with 1000 epoch, and more metrics:
# Fix every RNG (PyTorch CPU/GPU, NumPy, stdlib) so the long training
# run below is reproducible.
SEED = 42
torch.manual_seed(SEED)
torch.cuda.manual_seed(SEED)
np.random.seed(SEED)
random.seed(SEED)

# Metric accumulators, filled in during training and per-epoch testing
# by tuned_model_investigation below.
mape_train, mape_scores = [], []
rmse_train, rmse_scores = [], []
mae_train, mae_scores = [], []
r2_train, r2_scores = [], []

n_epochs = 1000
# src: https://stackoverflow.com/questions/45113245/how-to-get-mini-batches-in-pytorch-in-a-clean-and-efficient-way
def tuned_model_investigation(trial):
    """Re-train the network described by *trial* and record detailed metrics.

    Rebuilds the architecture encoded in the (frozen) best trial, trains it
    for ``n_epochs`` epochs on ``PCA_X_train``/``PCA_Y_train``, and evaluates
    it on ``PCA_X_test``/``PCA_Y_test`` after every epoch.  MAE, MAPE, RMSE
    and R2 are appended to the module-level ``*_train`` / ``*_scores`` lists.
    The *trained* model is pickled for later use.

    Parameters
    ----------
    trial : optuna.trial.FrozenTrial
        A finished trial; its stored hyper-parameters are replayed through
        the ``suggest_*`` calls below.

    Returns
    -------
    tuple of list
        ``(mae_scores, mape_scores, rmse_scores, r2_scores)`` — the
        per-epoch test-set metrics.
    """
    for key, value in trial.params.items():
        print(f"    {key}: {value}")
    # Prefer the GPU when one is available.
    device = "cpu"
    if torch.cuda.is_available():
        device = "cuda:0"
    # On a FrozenTrial these suggest_* calls return the stored values.
    lr = trial.suggest_loguniform('lr', 1e-3, 1e-1)
    batch_size = trial.suggest_categorical("batch_size", [16, 32, 64, 128])
    # suggests n_layers from 2-5
    n_layers = trial.suggest_int('n_layers', 2, 5)
    layers = []
    in_features = 25  # number of PCA components fed to the network.
    out_features = 0
    # Width of the first hidden layer; subsequent layers halve it.
    max_nrns = trial.suggest_int("neurons_HL1", 2, 1024, step=2)
    for i in range(n_layers):
        out_features = int(max_nrns)
        layers.append(torch.nn.Linear(in_features, out_features))
        # a 'linear' activation function is the same as no ac_fn at all.
        activation = trial.suggest_categorical(f"HL{i}_ac_fn", ["relu", "linear"])
        if activation == "relu":
            layers.append(torch.nn.ReLU())
        in_features = out_features
        # Halving stops at 2 to prevent the last layer being Linear(0, 1).
        if max_nrns > 2:
            max_nrns = max_nrns / 2
    layers.append(torch.nn.Linear(out_features, 1))
    pca_model = torch.nn.Sequential(*layers).to(device)
    print(pca_model)
    # use MAE as loss function (called L1Loss in PyTorch).
    loss_fn = nn.L1Loss()
    optimizer = optim.Adam(pca_model.parameters(), lr=lr)
    for epoch in range(n_epochs):
        # Training: mini-batches drawn from a fresh random permutation.
        # src: https://stackoverflow.com/questions/45113245
        permutation = torch.randperm(PCA_X_train.size()[0])
        for i in range(0, PCA_X_train.size()[0], batch_size):
            indices = permutation[i:i + batch_size]
            X_train_batch, Y_train_batch = PCA_X_train[indices], PCA_Y_train[indices]
            train_prediction = pca_model(X_train_batch.to(device))
            train_loss = loss_fn(train_prediction, Y_train_batch.to(device))
            train_mape = MAPE_pytorch(Y_train_batch.to(
                'cpu'), train_prediction.to('cpu'))
            train_rmse = RMSE_pytorch(Y_train_batch.to(
                'cpu'), train_prediction.to('cpu'))
            train_r2 = R2_pytorch(Y_train_batch.to(
                'cpu'), train_prediction.to('cpu'))
            optimizer.zero_grad()
            # backpropagation
            train_loss.backward()
            optimizer.step()
            # NOTE(review): training metrics recorded per batch — the
            # exported notebook's indentation was ambiguous; confirm.
            mae_train.append(train_loss.item())
            mape_train.append(train_mape.item())
            rmse_train.append(train_rmse.item())
            r2_train.append(train_r2.item())
        # Evaluate on the held-out test set after every epoch; no_grad
        # avoids building an unused autograd graph for the forward pass.
        with torch.no_grad():
            test_prediction = pca_model(PCA_X_test.to(device))
            test_loss = loss_fn(test_prediction, PCA_Y_test.to(device))
        test_mape = MAPE_pytorch(PCA_Y_test.to('cpu'), test_prediction.to('cpu'))
        test_rmse = RMSE_pytorch(PCA_Y_test.to('cpu'), test_prediction.to('cpu'))
        test_r2 = R2_pytorch(PCA_Y_test.to('cpu'), test_prediction.to('cpu'))
        mae_scores.append(test_loss.item())
        mape_scores.append(test_mape.item())
        rmse_scores.append(test_rmse.item())
        r2_scores.append(test_r2.item())
        print(f"Epoch{epoch+1}\ttrain_loss={train_loss};\ttest_loss={test_loss}")
    # Save the model AFTER training.  The original saved it before the
    # training loop, which pickled nothing but random initial weights.
    with open('dnn_trials/pca_25/' + pca_study_name + f"_trial{trial.number}.pickle", 'wb') as fout:
        pickle.dump(pca_model, fout)
    # we return the thing we are trying to maximize or minimize (the MAE - our loss fn)
    return mae_scores, mape_scores, rmse_scores, r2_scores
mae_scores, mape_scores, rmse_scores, r2_scores = tuned_model_investigation(pca_study.best_trial)
def _summary(values, scale=1.0):
    """Format 'mean +/- std' of *values*, optionally scaled (e.g. R2 to %)."""
    return f"{np.mean(values) * scale} +/- {np.std(values) * scale}"


# Final report: aggregate training metrics, then held-out test metrics.
print("\n\n------------------------------------------- TRAINING SCORES -------------------------------------------")
print(f"Overall MAE: {_summary(mae_train)}")
print(f"Overall RMSE: {_summary(rmse_train)}")
print(f"Overall MAPE: {_summary(mape_train)}")
print(f"Overall R2: {_summary(r2_train, 100)}")
print("\n\n------------------------------------------- TESTING SCORES -------------------------------------------")
print(f"Overall MAE: {_summary(mae_scores)}")
print(f"Overall RMSE: {_summary(rmse_scores)}")
print(f"Overall MAPE: {_summary(mape_scores)}")
print(f"Overall R2: {_summary(r2_scores, 100)}\n\n")
lr: 0.0010009502114520618
batch_size: 16
n_layers: 3
neurons_HL1: 1024
HL0_ac_fn: relu
HL1_ac_fn: relu
HL2_ac_fn: relu
Sequential(
(0): Linear(in_features=25, out_features=1024, bias=True)
(1): ReLU()
(2): Linear(in_features=1024, out_features=512, bias=True)
(3): ReLU()
(4): Linear(in_features=512, out_features=256, bias=True)
(5): ReLU()
(6): Linear(in_features=256, out_features=1, bias=True)
)
Epoch1 train_loss=238.060791015625; test_loss=190.18316650390625
Epoch2 train_loss=256.3294982910156; test_loss=178.10316467285156
Epoch3 train_loss=534.600830078125; test_loss=179.71119689941406
Epoch4 train_loss=147.0652618408203; test_loss=173.8993377685547
Epoch5 train_loss=111.52363586425781; test_loss=177.64923095703125
Epoch6 train_loss=119.76525115966797; test_loss=182.44044494628906
Epoch7 train_loss=220.8156280517578; test_loss=178.5710906982422
Epoch8 train_loss=273.1591796875; test_loss=174.42657470703125
Epoch9 train_loss=87.76676940917969; test_loss=186.8607940673828
Epoch10 train_loss=53.003936767578125; test_loss=172.48956298828125
Epoch11 train_loss=36.970008850097656; test_loss=167.92111206054688
Epoch12 train_loss=73.73387908935547; test_loss=197.63441467285156
Epoch13 train_loss=242.87840270996094; test_loss=172.67994689941406
Epoch14 train_loss=62.521453857421875; test_loss=173.8530731201172
Epoch15 train_loss=288.72052001953125; test_loss=171.2710723876953
Epoch16 train_loss=86.14436340332031; test_loss=176.47128295898438
Epoch17 train_loss=194.66908264160156; test_loss=170.837158203125
Epoch18 train_loss=128.68472290039062; test_loss=174.42233276367188
Epoch19 train_loss=169.2903289794922; test_loss=172.6147918701172
Epoch20 train_loss=58.073299407958984; test_loss=172.2433319091797
Epoch21 train_loss=126.53437805175781; test_loss=173.6949462890625
Epoch22 train_loss=75.75344848632812; test_loss=177.96432495117188
Epoch23 train_loss=48.74827194213867; test_loss=170.3489990234375
Epoch24 train_loss=108.8748779296875; test_loss=166.70004272460938
Epoch25 train_loss=164.34230041503906; test_loss=166.37982177734375
Epoch26 train_loss=71.8978271484375; test_loss=163.55865478515625
Epoch27 train_loss=268.325439453125; test_loss=183.69674682617188
Epoch28 train_loss=100.8961181640625; test_loss=168.001220703125
Epoch29 train_loss=103.92818450927734; test_loss=166.14828491210938
Epoch30 train_loss=178.61947631835938; test_loss=178.9316864013672
Epoch31 train_loss=74.0322494506836; test_loss=165.7296600341797
Epoch32 train_loss=102.866455078125; test_loss=168.34568786621094
Epoch33 train_loss=135.51422119140625; test_loss=177.96401977539062
Epoch34 train_loss=311.0940856933594; test_loss=168.33042907714844
Epoch35 train_loss=137.49212646484375; test_loss=170.30105590820312
Epoch36 train_loss=281.0335998535156; test_loss=164.84962463378906
Epoch37 train_loss=126.16669464111328; test_loss=166.4260711669922
Epoch38 train_loss=197.8875732421875; test_loss=166.01358032226562
Epoch39 train_loss=321.004150390625; test_loss=169.2583770751953
Epoch40 train_loss=70.53218841552734; test_loss=168.78208923339844
Epoch41 train_loss=211.72706604003906; test_loss=164.51646423339844
Epoch42 train_loss=57.420265197753906; test_loss=166.6097412109375
Epoch43 train_loss=98.74879455566406; test_loss=160.4752655029297
Epoch44 train_loss=522.739990234375; test_loss=166.25137329101562
Epoch45 train_loss=233.49490356445312; test_loss=162.84494018554688
Epoch46 train_loss=211.82794189453125; test_loss=163.3433380126953
Epoch47 train_loss=131.93826293945312; test_loss=166.3177032470703
Epoch48 train_loss=138.8253936767578; test_loss=162.48716735839844
Epoch49 train_loss=199.81808471679688; test_loss=163.08485412597656
Epoch50 train_loss=149.80322265625; test_loss=182.93975830078125
Epoch51 train_loss=301.0743713378906; test_loss=162.96006774902344
Epoch52 train_loss=716.9097900390625; test_loss=169.11294555664062
Epoch53 train_loss=47.33763122558594; test_loss=164.77259826660156
Epoch54 train_loss=507.8851318359375; test_loss=164.76541137695312
Epoch55 train_loss=156.1690216064453; test_loss=168.0943145751953
Epoch56 train_loss=296.130615234375; test_loss=160.5338897705078
Epoch57 train_loss=93.93338775634766; test_loss=164.811767578125
Epoch58 train_loss=231.51168823242188; test_loss=173.3695526123047
Epoch59 train_loss=125.18246459960938; test_loss=161.4952850341797
Epoch60 train_loss=99.3521728515625; test_loss=180.6342010498047
Epoch61 train_loss=42.315185546875; test_loss=162.02001953125
Epoch62 train_loss=27.684589385986328; test_loss=159.24575805664062
Epoch63 train_loss=17.247638702392578; test_loss=163.449951171875
Epoch64 train_loss=43.207984924316406; test_loss=164.4384765625
Epoch65 train_loss=218.19110107421875; test_loss=162.5305938720703
Epoch66 train_loss=165.7554473876953; test_loss=171.08323669433594
Epoch67 train_loss=80.66804504394531; test_loss=174.09861755371094
Epoch68 train_loss=133.898681640625; test_loss=163.68826293945312
Epoch69 train_loss=61.315547943115234; test_loss=162.7822265625
Epoch70 train_loss=93.23925018310547; test_loss=164.73855590820312
Epoch71 train_loss=189.1687469482422; test_loss=167.75650024414062
Epoch72 train_loss=128.19761657714844; test_loss=157.95266723632812
Epoch73 train_loss=68.35477447509766; test_loss=159.1302947998047
Epoch74 train_loss=355.9836120605469; test_loss=160.01116943359375
Epoch75 train_loss=49.206939697265625; test_loss=161.21775817871094
Epoch76 train_loss=211.5587158203125; test_loss=158.83595275878906
Epoch77 train_loss=85.05738067626953; test_loss=160.1910400390625
Epoch78 train_loss=134.01754760742188; test_loss=163.70838928222656
Epoch79 train_loss=124.41273498535156; test_loss=170.45904541015625
Epoch80 train_loss=87.59191131591797; test_loss=163.64393615722656
Epoch81 train_loss=25.565650939941406; test_loss=162.8328399658203
Epoch82 train_loss=29.00312042236328; test_loss=159.5523681640625
Epoch83 train_loss=87.14800262451172; test_loss=158.61837768554688
Epoch84 train_loss=217.95428466796875; test_loss=166.2533721923828
Epoch85 train_loss=179.5815887451172; test_loss=158.79588317871094
Epoch86 train_loss=213.95208740234375; test_loss=157.74526977539062
Epoch87 train_loss=47.142982482910156; test_loss=163.7124481201172
Epoch88 train_loss=102.32046508789062; test_loss=165.5762481689453
Epoch89 train_loss=60.729827880859375; test_loss=160.16226196289062
Epoch90 train_loss=61.29521942138672; test_loss=159.28907775878906
Epoch91 train_loss=214.6334686279297; test_loss=158.78416442871094
Epoch92 train_loss=136.45578002929688; test_loss=157.98150634765625
Epoch93 train_loss=55.751495361328125; test_loss=155.92196655273438
Epoch94 train_loss=16.318283081054688; test_loss=172.03115844726562
Epoch95 train_loss=77.90341186523438; test_loss=159.10848999023438
Epoch96 train_loss=38.79527282714844; test_loss=157.85206604003906
Epoch97 train_loss=114.17554473876953; test_loss=158.3992919921875
Epoch98 train_loss=172.66062927246094; test_loss=158.61854553222656
Epoch99 train_loss=101.91363525390625; test_loss=157.02671813964844
Epoch100 train_loss=109.56448364257812; test_loss=158.21392822265625
Epoch101 train_loss=130.0523223876953; test_loss=160.32566833496094
Epoch102 train_loss=105.28055572509766; test_loss=158.33314514160156
Epoch103 train_loss=221.25758361816406; test_loss=158.89088439941406
Epoch104 train_loss=228.08848571777344; test_loss=155.70066833496094
Epoch105 train_loss=95.98029327392578; test_loss=161.02053833007812
Epoch106 train_loss=116.02095031738281; test_loss=159.21173095703125
Epoch107 train_loss=51.211151123046875; test_loss=162.9945831298828
Epoch108 train_loss=56.050140380859375; test_loss=159.90419006347656
Epoch109 train_loss=100.8288345336914; test_loss=156.87167358398438
Epoch110 train_loss=50.45649719238281; test_loss=158.5913543701172
Epoch111 train_loss=164.35067749023438; test_loss=158.3984832763672
Epoch112 train_loss=165.12213134765625; test_loss=158.14041137695312
Epoch113 train_loss=154.510009765625; test_loss=165.1421356201172
Epoch114 train_loss=243.39068603515625; test_loss=161.2287139892578
Epoch115 train_loss=48.88505554199219; test_loss=158.7473907470703
Epoch116 train_loss=137.02655029296875; test_loss=158.90338134765625
Epoch117 train_loss=120.42971801757812; test_loss=158.808349609375
Epoch118 train_loss=76.18019104003906; test_loss=154.43203735351562
Epoch119 train_loss=87.55830383300781; test_loss=159.34715270996094
Epoch120 train_loss=64.65831756591797; test_loss=159.2696533203125
Epoch121 train_loss=129.8675079345703; test_loss=156.0670166015625
Epoch122 train_loss=293.36126708984375; test_loss=160.78985595703125
Epoch123 train_loss=209.71861267089844; test_loss=163.113037109375
Epoch124 train_loss=36.567283630371094; test_loss=158.22610473632812
Epoch125 train_loss=58.32968521118164; test_loss=161.07293701171875
Epoch126 train_loss=54.55742645263672; test_loss=159.61585998535156
Epoch127 train_loss=74.37445068359375; test_loss=157.65003967285156
Epoch128 train_loss=33.97166442871094; test_loss=158.76602172851562
Epoch129 train_loss=105.8795166015625; test_loss=158.2593994140625
Epoch130 train_loss=133.41517639160156; test_loss=161.04031372070312
Epoch131 train_loss=92.33802795410156; test_loss=162.44644165039062
Epoch132 train_loss=73.720703125; test_loss=157.61605834960938
Epoch133 train_loss=116.98433685302734; test_loss=159.97386169433594
Epoch134 train_loss=113.62908935546875; test_loss=160.17970275878906
Epoch135 train_loss=550.2030029296875; test_loss=157.3284454345703
Epoch136 train_loss=44.80911636352539; test_loss=160.5409698486328
Epoch137 train_loss=224.27029418945312; test_loss=159.88055419921875
Epoch138 train_loss=62.39830780029297; test_loss=156.3241424560547
Epoch139 train_loss=157.71441650390625; test_loss=159.8809814453125
Epoch140 train_loss=321.21783447265625; test_loss=169.77072143554688
Epoch141 train_loss=57.10589599609375; test_loss=157.14816284179688
Epoch142 train_loss=241.0718536376953; test_loss=159.61590576171875
Epoch143 train_loss=38.83046340942383; test_loss=161.9173583984375
Epoch144 train_loss=124.59048461914062; test_loss=159.66397094726562
Epoch145 train_loss=91.45477294921875; test_loss=159.74562072753906
Epoch146 train_loss=120.83480072021484; test_loss=162.4714813232422
Epoch147 train_loss=225.2828369140625; test_loss=156.9985809326172
Epoch148 train_loss=554.47998046875; test_loss=155.28292846679688
Epoch149 train_loss=238.71914672851562; test_loss=156.46424865722656
Epoch150 train_loss=132.8780975341797; test_loss=164.83169555664062
Epoch151 train_loss=92.36970520019531; test_loss=157.82960510253906
Epoch152 train_loss=250.61181640625; test_loss=159.82228088378906
Epoch153 train_loss=103.1626968383789; test_loss=160.00094604492188
Epoch154 train_loss=34.13584518432617; test_loss=159.8314666748047
Epoch155 train_loss=52.07754898071289; test_loss=158.83493041992188
Epoch156 train_loss=133.79315185546875; test_loss=158.66555786132812
Epoch157 train_loss=53.3975830078125; test_loss=157.027587890625
Epoch158 train_loss=110.35773468017578; test_loss=158.4195098876953
Epoch159 train_loss=67.71244812011719; test_loss=159.787109375
Epoch160 train_loss=189.38067626953125; test_loss=160.46987915039062
Epoch161 train_loss=71.89793395996094; test_loss=158.26768493652344
Epoch162 train_loss=79.0572509765625; test_loss=160.56468200683594
Epoch163 train_loss=103.22097778320312; test_loss=164.08740234375
Epoch164 train_loss=162.3744354248047; test_loss=160.7611541748047
Epoch165 train_loss=146.8373260498047; test_loss=162.45046997070312
Epoch166 train_loss=104.84620666503906; test_loss=158.88427734375
Epoch167 train_loss=79.42488098144531; test_loss=159.322265625
Epoch168 train_loss=35.87999725341797; test_loss=156.94557189941406
Epoch169 train_loss=310.63055419921875; test_loss=158.60052490234375
Epoch170 train_loss=108.91458129882812; test_loss=155.82566833496094
Epoch171 train_loss=119.29476928710938; test_loss=158.95835876464844
Epoch172 train_loss=32.596343994140625; test_loss=156.68699645996094
Epoch173 train_loss=352.2717590332031; test_loss=159.9925994873047
Epoch174 train_loss=306.51593017578125; test_loss=157.615478515625
Epoch175 train_loss=99.74127197265625; test_loss=159.4600067138672
Epoch176 train_loss=139.19540405273438; test_loss=159.91795349121094
Epoch177 train_loss=160.24740600585938; test_loss=159.44149780273438
Epoch178 train_loss=77.8749008178711; test_loss=159.96791076660156
Epoch179 train_loss=144.416748046875; test_loss=159.84564208984375
Epoch180 train_loss=256.4136047363281; test_loss=159.24986267089844
Epoch181 train_loss=105.50320434570312; test_loss=157.29457092285156
Epoch182 train_loss=65.39044189453125; test_loss=156.63177490234375
Epoch183 train_loss=49.07676696777344; test_loss=156.80967712402344
Epoch184 train_loss=134.0091552734375; test_loss=158.88217163085938
Epoch185 train_loss=353.8418273925781; test_loss=158.19590759277344
Epoch186 train_loss=66.90171813964844; test_loss=159.4685821533203
Epoch187 train_loss=105.43417358398438; test_loss=162.27952575683594
Epoch188 train_loss=81.26814270019531; test_loss=162.56585693359375
Epoch189 train_loss=116.5601806640625; test_loss=160.52345275878906
Epoch190 train_loss=134.54107666015625; test_loss=162.43600463867188
Epoch191 train_loss=103.37025451660156; test_loss=160.64688110351562
Epoch192 train_loss=114.02582550048828; test_loss=160.43606567382812
Epoch193 train_loss=56.40245056152344; test_loss=160.5750732421875
Epoch194 train_loss=91.0223388671875; test_loss=158.03182983398438
Epoch195 train_loss=154.6014404296875; test_loss=163.52537536621094
Epoch196 train_loss=97.71273803710938; test_loss=159.2908172607422
Epoch197 train_loss=124.76631164550781; test_loss=159.8652801513672
Epoch198 train_loss=665.29052734375; test_loss=161.80288696289062
Epoch199 train_loss=145.4611358642578; test_loss=161.0760498046875
Epoch200 train_loss=133.38421630859375; test_loss=158.53616333007812
Epoch201 train_loss=240.52499389648438; test_loss=162.8979034423828
Epoch202 train_loss=79.08340454101562; test_loss=160.1356658935547
Epoch203 train_loss=233.0101318359375; test_loss=163.35125732421875
Epoch204 train_loss=80.70011901855469; test_loss=164.98114013671875
Epoch205 train_loss=74.11695098876953; test_loss=160.7938232421875
Epoch206 train_loss=57.36963653564453; test_loss=161.8104248046875
Epoch207 train_loss=90.99008178710938; test_loss=163.62599182128906
Epoch208 train_loss=177.38375854492188; test_loss=160.95626831054688
Epoch209 train_loss=58.62328338623047; test_loss=164.19488525390625
Epoch210 train_loss=381.87567138671875; test_loss=161.54039001464844
Epoch211 train_loss=51.06732940673828; test_loss=161.7758331298828
Epoch212 train_loss=100.4764175415039; test_loss=161.24777221679688
Epoch213 train_loss=52.934627532958984; test_loss=158.54257202148438
Epoch214 train_loss=103.97639465332031; test_loss=159.96359252929688
Epoch215 train_loss=118.06503295898438; test_loss=160.46144104003906
Epoch216 train_loss=78.25745391845703; test_loss=160.76507568359375
Epoch217 train_loss=51.200103759765625; test_loss=164.63645935058594
Epoch218 train_loss=43.357662200927734; test_loss=164.92262268066406
Epoch219 train_loss=68.70620727539062; test_loss=165.2705841064453
Epoch220 train_loss=43.39385223388672; test_loss=159.494384765625
Epoch221 train_loss=52.08007049560547; test_loss=166.40872192382812
Epoch222 train_loss=124.20181274414062; test_loss=160.347900390625
Epoch223 train_loss=86.78612518310547; test_loss=162.59938049316406
Epoch224 train_loss=37.23016357421875; test_loss=161.18890380859375
Epoch225 train_loss=48.22135925292969; test_loss=160.313720703125
Epoch226 train_loss=45.039268493652344; test_loss=160.4500732421875
Epoch227 train_loss=136.53717041015625; test_loss=165.82362365722656
Epoch228 train_loss=181.23583984375; test_loss=166.73782348632812
Epoch229 train_loss=110.04661560058594; test_loss=160.57081604003906
Epoch230 train_loss=77.08924865722656; test_loss=164.50672912597656
Epoch231 train_loss=195.94032287597656; test_loss=161.27671813964844
Epoch232 train_loss=36.8726806640625; test_loss=160.4677734375
Epoch233 train_loss=143.36659240722656; test_loss=161.7719268798828
Epoch234 train_loss=68.38050842285156; test_loss=165.2305145263672
Epoch235 train_loss=124.521728515625; test_loss=161.48440551757812
Epoch236 train_loss=419.22564697265625; test_loss=164.2892303466797
Epoch237 train_loss=98.87559509277344; test_loss=162.6111297607422
Epoch238 train_loss=156.68902587890625; test_loss=163.07199096679688
Epoch239 train_loss=145.06773376464844; test_loss=163.38818359375
Epoch240 train_loss=15.818851470947266; test_loss=161.24691772460938
Epoch241 train_loss=61.4508056640625; test_loss=163.17050170898438
Epoch242 train_loss=82.83416748046875; test_loss=165.00213623046875
Epoch243 train_loss=48.09809494018555; test_loss=166.48886108398438
Epoch244 train_loss=51.34288024902344; test_loss=162.07154846191406
Epoch245 train_loss=71.94486999511719; test_loss=165.62142944335938
Epoch246 train_loss=70.11810302734375; test_loss=162.66439819335938
Epoch247 train_loss=538.0011596679688; test_loss=167.59652709960938
Epoch248 train_loss=50.997039794921875; test_loss=165.50039672851562
Epoch249 train_loss=139.94406127929688; test_loss=163.91212463378906
Epoch250 train_loss=143.3040771484375; test_loss=165.92031860351562
Epoch251 train_loss=41.37518310546875; test_loss=165.59375
Epoch252 train_loss=137.5359344482422; test_loss=161.85723876953125
Epoch253 train_loss=50.52887725830078; test_loss=161.6602783203125
Epoch254 train_loss=54.188323974609375; test_loss=163.09056091308594
Epoch255 train_loss=143.68455505371094; test_loss=163.2849578857422
Epoch256 train_loss=43.16613006591797; test_loss=166.46005249023438
Epoch257 train_loss=118.98640441894531; test_loss=163.36854553222656
Epoch258 train_loss=85.40042877197266; test_loss=165.42539978027344
Epoch259 train_loss=137.0239715576172; test_loss=166.282470703125
Epoch260 train_loss=21.000045776367188; test_loss=164.7252960205078
Epoch261 train_loss=175.94187927246094; test_loss=165.67364501953125
Epoch262 train_loss=40.50865173339844; test_loss=165.03363037109375
Epoch263 train_loss=106.23435974121094; test_loss=166.00039672851562
Epoch264 train_loss=39.09770202636719; test_loss=162.69349670410156
Epoch265 train_loss=90.15544891357422; test_loss=164.5777130126953
Epoch266 train_loss=47.34986114501953; test_loss=165.1575927734375
Epoch267 train_loss=103.31550598144531; test_loss=163.7424774169922
Epoch268 train_loss=54.760555267333984; test_loss=167.1200408935547
Epoch269 train_loss=232.43759155273438; test_loss=163.8726348876953
Epoch270 train_loss=120.11141967773438; test_loss=165.4534454345703
Epoch271 train_loss=82.74765014648438; test_loss=166.2222442626953
Epoch272 train_loss=94.64924621582031; test_loss=164.60899353027344
Epoch273 train_loss=8.252494812011719; test_loss=165.25364685058594
Epoch274 train_loss=47.236846923828125; test_loss=164.4625244140625
Epoch275 train_loss=129.80386352539062; test_loss=165.23764038085938
Epoch276 train_loss=56.697303771972656; test_loss=166.36505126953125
Epoch277 train_loss=24.019119262695312; test_loss=163.72274780273438
Epoch278 train_loss=68.86126708984375; test_loss=163.34889221191406
Epoch279 train_loss=77.99732971191406; test_loss=164.52149963378906
Epoch280 train_loss=24.207046508789062; test_loss=163.9700469970703
Epoch281 train_loss=103.41069793701172; test_loss=163.92831420898438
Epoch282 train_loss=55.808128356933594; test_loss=167.67469787597656
Epoch283 train_loss=257.2516784667969; test_loss=165.2332000732422
Epoch284 train_loss=52.182769775390625; test_loss=163.6233367919922
Epoch285 train_loss=178.13352966308594; test_loss=163.8647003173828
Epoch286 train_loss=107.52255249023438; test_loss=164.33587646484375
Epoch287 train_loss=42.370201110839844; test_loss=166.8870391845703
Epoch288 train_loss=81.15611267089844; test_loss=164.9365234375
Epoch289 train_loss=65.62229919433594; test_loss=168.9171600341797
Epoch290 train_loss=71.70591735839844; test_loss=165.48696899414062
Epoch291 train_loss=309.80902099609375; test_loss=168.762451171875
Epoch292 train_loss=44.8223876953125; test_loss=169.04861450195312
Epoch293 train_loss=48.82261657714844; test_loss=166.4503173828125
Epoch294 train_loss=79.10426330566406; test_loss=165.0809783935547
Epoch295 train_loss=22.084014892578125; test_loss=167.6000518798828
Epoch296 train_loss=45.92680358886719; test_loss=170.85269165039062
Epoch297 train_loss=52.94047164916992; test_loss=165.74000549316406
Epoch298 train_loss=49.44734191894531; test_loss=170.86724853515625
Epoch299 train_loss=104.89131164550781; test_loss=167.94602966308594
Epoch300 train_loss=74.9501953125; test_loss=169.84902954101562
Epoch301 train_loss=25.42512321472168; test_loss=165.58084106445312
Epoch302 train_loss=123.76353454589844; test_loss=169.36782836914062
Epoch303 train_loss=117.66578674316406; test_loss=166.59750366210938
Epoch304 train_loss=33.27180480957031; test_loss=170.88694763183594
Epoch305 train_loss=43.3107795715332; test_loss=168.18963623046875
Epoch306 train_loss=58.603370666503906; test_loss=170.17893981933594
Epoch307 train_loss=120.15745544433594; test_loss=165.2846221923828
Epoch308 train_loss=67.94783020019531; test_loss=169.47421264648438
Epoch309 train_loss=71.55928802490234; test_loss=168.1537322998047
Epoch310 train_loss=70.60514068603516; test_loss=168.52218627929688
Epoch311 train_loss=70.9620590209961; test_loss=166.55853271484375
Epoch312 train_loss=95.70828247070312; test_loss=168.96337890625
Epoch313 train_loss=29.315521240234375; test_loss=167.1298370361328
Epoch314 train_loss=171.4241485595703; test_loss=170.26470947265625
Epoch315 train_loss=62.141845703125; test_loss=167.4912109375
Epoch316 train_loss=31.611888885498047; test_loss=170.83335876464844
Epoch317 train_loss=81.76210021972656; test_loss=165.76025390625
Epoch318 train_loss=46.29903030395508; test_loss=168.40988159179688
Epoch319 train_loss=253.44369506835938; test_loss=168.27259826660156
Epoch320 train_loss=95.84651947021484; test_loss=168.9790496826172
Epoch321 train_loss=27.164459228515625; test_loss=168.91761779785156
Epoch322 train_loss=52.27085876464844; test_loss=169.190185546875
Epoch323 train_loss=100.15142822265625; test_loss=165.40286254882812
Epoch324 train_loss=41.85301971435547; test_loss=165.9451141357422
Epoch325 train_loss=75.94235229492188; test_loss=167.3005828857422
Epoch326 train_loss=96.14419555664062; test_loss=167.0187225341797
Epoch327 train_loss=86.00018310546875; test_loss=168.09132385253906
Epoch328 train_loss=55.80319595336914; test_loss=168.70005798339844
Epoch329 train_loss=28.098915100097656; test_loss=170.359375
Epoch330 train_loss=54.618507385253906; test_loss=170.00250244140625
Epoch331 train_loss=284.3876953125; test_loss=168.1603240966797
Epoch332 train_loss=93.48981475830078; test_loss=166.41976928710938
Epoch333 train_loss=47.77869415283203; test_loss=170.48629760742188
Epoch334 train_loss=69.5302734375; test_loss=170.24143981933594
Epoch335 train_loss=98.32146453857422; test_loss=166.22213745117188
Epoch336 train_loss=80.68159484863281; test_loss=173.1396484375
Epoch337 train_loss=98.18598937988281; test_loss=169.0738525390625
Epoch338 train_loss=64.12934875488281; test_loss=168.1130828857422
Epoch339 train_loss=102.62425231933594; test_loss=169.23622131347656
Epoch340 train_loss=34.492942810058594; test_loss=166.96551513671875
Epoch341 train_loss=121.06163024902344; test_loss=165.5964813232422
Epoch342 train_loss=51.779075622558594; test_loss=168.5289764404297
Epoch343 train_loss=232.89012145996094; test_loss=169.34165954589844
Epoch344 train_loss=36.88310241699219; test_loss=169.33604431152344
Epoch345 train_loss=231.95359802246094; test_loss=169.6326904296875
Epoch346 train_loss=40.48263931274414; test_loss=169.2100830078125
Epoch347 train_loss=83.56976318359375; test_loss=169.4661865234375
Epoch348 train_loss=65.04328918457031; test_loss=170.45826721191406
Epoch349 train_loss=21.766456604003906; test_loss=169.97071838378906
Epoch350 train_loss=37.17177200317383; test_loss=169.39553833007812
Epoch351 train_loss=146.6077880859375; test_loss=170.45626831054688
Epoch352 train_loss=32.619258880615234; test_loss=167.72747802734375
Epoch353 train_loss=42.64434051513672; test_loss=167.86386108398438
Epoch354 train_loss=120.5677719116211; test_loss=170.2579345703125
Epoch355 train_loss=45.71985626220703; test_loss=169.1156005859375
Epoch356 train_loss=91.1574935913086; test_loss=170.88226318359375
Epoch357 train_loss=44.15870666503906; test_loss=167.77239990234375
Epoch358 train_loss=37.184959411621094; test_loss=169.7239227294922
Epoch359 train_loss=85.15328979492188; test_loss=169.34947204589844
Epoch360 train_loss=30.560096740722656; test_loss=171.45111083984375
Epoch361 train_loss=36.72837829589844; test_loss=170.81854248046875
Epoch362 train_loss=68.07805633544922; test_loss=167.94683837890625
Epoch363 train_loss=84.90083312988281; test_loss=171.23907470703125
Epoch364 train_loss=97.43453216552734; test_loss=174.04061889648438
Epoch365 train_loss=37.33732604980469; test_loss=171.0713653564453
Epoch366 train_loss=33.05085372924805; test_loss=169.9374237060547
Epoch367 train_loss=63.824462890625; test_loss=168.3592529296875
Epoch368 train_loss=155.1480712890625; test_loss=171.78652954101562
Epoch369 train_loss=150.42864990234375; test_loss=166.4392547607422
Epoch370 train_loss=85.56716918945312; test_loss=167.70909118652344
Epoch371 train_loss=29.940322875976562; test_loss=171.6311798095703
Epoch372 train_loss=38.114688873291016; test_loss=169.37478637695312
Epoch373 train_loss=68.68817901611328; test_loss=170.64260864257812
Epoch374 train_loss=21.102230072021484; test_loss=172.18704223632812
Epoch375 train_loss=651.6910400390625; test_loss=168.85614013671875
Epoch376 train_loss=97.07269287109375; test_loss=171.7290802001953
Epoch377 train_loss=64.86953735351562; test_loss=171.57427978515625
Epoch378 train_loss=144.83555603027344; test_loss=171.52206420898438
Epoch379 train_loss=449.6192932128906; test_loss=172.49673461914062
Epoch380 train_loss=244.23727416992188; test_loss=172.85470581054688
Epoch381 train_loss=17.16619873046875; test_loss=171.8023681640625
Epoch382 train_loss=87.444580078125; test_loss=170.467529296875
Epoch383 train_loss=35.52691650390625; test_loss=171.08729553222656
Epoch384 train_loss=52.68663787841797; test_loss=171.41954040527344
Epoch385 train_loss=20.49043083190918; test_loss=170.42942810058594
Epoch386 train_loss=118.25314331054688; test_loss=168.94546508789062
Epoch387 train_loss=49.656368255615234; test_loss=173.57235717773438
Epoch388 train_loss=25.499298095703125; test_loss=170.48428344726562
Epoch389 train_loss=48.439239501953125; test_loss=172.1377716064453
Epoch390 train_loss=38.959327697753906; test_loss=173.40814208984375
Epoch391 train_loss=222.16403198242188; test_loss=174.66659545898438
Epoch392 train_loss=10.024887084960938; test_loss=175.0425262451172
Epoch393 train_loss=73.99769592285156; test_loss=173.42112731933594
Epoch394 train_loss=182.33856201171875; test_loss=173.09324645996094
Epoch395 train_loss=64.21620178222656; test_loss=174.95851135253906
Epoch396 train_loss=32.62371826171875; test_loss=173.4033966064453
Epoch397 train_loss=25.811134338378906; test_loss=171.62648010253906
Epoch398 train_loss=71.99649047851562; test_loss=173.7876434326172
Epoch399 train_loss=42.68525314331055; test_loss=175.33657836914062
Epoch400 train_loss=48.76263427734375; test_loss=171.42713928222656
Epoch401 train_loss=18.363224029541016; test_loss=171.42642211914062
Epoch402 train_loss=23.836002349853516; test_loss=171.37612915039062
Epoch403 train_loss=80.09860229492188; test_loss=169.95120239257812
Epoch404 train_loss=10.26932144165039; test_loss=172.6646728515625
Epoch405 train_loss=11.231868743896484; test_loss=174.0965576171875
Epoch406 train_loss=50.27619552612305; test_loss=172.66053771972656
Epoch407 train_loss=69.85014343261719; test_loss=169.78314208984375
Epoch408 train_loss=292.7518310546875; test_loss=173.04847717285156
Epoch409 train_loss=26.25628662109375; test_loss=173.54281616210938
Epoch410 train_loss=23.650096893310547; test_loss=172.06961059570312
Epoch411 train_loss=3.9356517791748047; test_loss=174.07073974609375
Epoch412 train_loss=32.812591552734375; test_loss=174.3164520263672
Epoch413 train_loss=22.34130859375; test_loss=173.76051330566406
Epoch414 train_loss=53.00023651123047; test_loss=173.66189575195312
Epoch415 train_loss=11.359954833984375; test_loss=171.60093688964844
Epoch416 train_loss=43.233917236328125; test_loss=171.32504272460938
Epoch417 train_loss=125.85855102539062; test_loss=174.0502471923828
Epoch418 train_loss=9.708328247070312; test_loss=173.7288818359375
Epoch419 train_loss=161.62489318847656; test_loss=171.71189880371094
Epoch420 train_loss=35.83368682861328; test_loss=173.90463256835938
Epoch421 train_loss=39.4964599609375; test_loss=172.66583251953125
Epoch422 train_loss=99.72056579589844; test_loss=172.85557556152344
Epoch423 train_loss=58.632293701171875; test_loss=176.42274475097656
Epoch424 train_loss=112.95845031738281; test_loss=175.05601501464844
Epoch425 train_loss=303.6446838378906; test_loss=172.40200805664062
Epoch426 train_loss=17.991302490234375; test_loss=172.77915954589844
Epoch427 train_loss=48.311737060546875; test_loss=174.23260498046875
Epoch428 train_loss=48.922584533691406; test_loss=174.2199249267578
Epoch429 train_loss=121.51145935058594; test_loss=173.8807830810547
Epoch430 train_loss=67.52146911621094; test_loss=174.05410766601562
Epoch431 train_loss=120.44912719726562; test_loss=173.70777893066406
Epoch432 train_loss=66.2193603515625; test_loss=175.06710815429688
Epoch433 train_loss=59.649017333984375; test_loss=171.91326904296875
Epoch434 train_loss=113.17527770996094; test_loss=175.95855712890625
Epoch435 train_loss=37.633209228515625; test_loss=172.8894500732422
Epoch436 train_loss=130.2121124267578; test_loss=176.7994384765625
Epoch437 train_loss=63.3221435546875; test_loss=173.89907836914062
Epoch438 train_loss=31.207923889160156; test_loss=176.8642578125
Epoch439 train_loss=25.288177490234375; test_loss=173.45692443847656
Epoch440 train_loss=95.48377990722656; test_loss=174.78038024902344
Epoch441 train_loss=68.65547180175781; test_loss=175.14590454101562
Epoch442 train_loss=40.906715393066406; test_loss=173.83021545410156
Epoch443 train_loss=34.91472625732422; test_loss=173.9217071533203
Epoch444 train_loss=78.25755310058594; test_loss=174.2462158203125
Epoch445 train_loss=91.76750946044922; test_loss=174.28622436523438
Epoch446 train_loss=167.89468383789062; test_loss=174.6183624267578
Epoch447 train_loss=108.82850646972656; test_loss=173.7212677001953
Epoch448 train_loss=43.415496826171875; test_loss=173.62059020996094
Epoch449 train_loss=149.28204345703125; test_loss=175.66806030273438
Epoch450 train_loss=285.515625; test_loss=176.02267456054688
Epoch451 train_loss=54.87152099609375; test_loss=173.9006805419922
Epoch452 train_loss=20.89702606201172; test_loss=173.20071411132812
Epoch453 train_loss=65.85221862792969; test_loss=174.15792846679688
Epoch454 train_loss=12.268745422363281; test_loss=177.22598266601562
Epoch455 train_loss=75.25263977050781; test_loss=174.36402893066406
Epoch456 train_loss=28.56720733642578; test_loss=177.1024932861328
Epoch457 train_loss=30.89740753173828; test_loss=173.5244140625
Epoch458 train_loss=41.55109405517578; test_loss=177.29141235351562
Epoch459 train_loss=77.67739868164062; test_loss=177.2093048095703
Epoch460 train_loss=238.58197021484375; test_loss=177.1354522705078
Epoch461 train_loss=51.45765686035156; test_loss=174.625244140625
Epoch462 train_loss=40.233734130859375; test_loss=174.91262817382812
Epoch463 train_loss=40.749183654785156; test_loss=173.0440216064453
Epoch464 train_loss=103.98725128173828; test_loss=178.12814331054688
Epoch465 train_loss=70.74861145019531; test_loss=176.26535034179688
Epoch466 train_loss=60.284698486328125; test_loss=177.03196716308594
Epoch467 train_loss=86.88014221191406; test_loss=176.2725067138672
Epoch468 train_loss=51.03990173339844; test_loss=179.762451171875
Epoch469 train_loss=119.7851333618164; test_loss=174.68072509765625
Epoch470 train_loss=66.31498718261719; test_loss=172.68194580078125
Epoch471 train_loss=53.61720275878906; test_loss=175.52256774902344
Epoch472 train_loss=49.62330627441406; test_loss=173.930419921875
Epoch473 train_loss=49.65968322753906; test_loss=178.26693725585938
Epoch474 train_loss=116.63571166992188; test_loss=174.32589721679688
Epoch475 train_loss=65.58152770996094; test_loss=177.7643280029297
Epoch476 train_loss=81.67178344726562; test_loss=175.73085021972656
Epoch477 train_loss=17.052274703979492; test_loss=174.55569458007812
Epoch478 train_loss=21.910079956054688; test_loss=173.81060791015625
Epoch479 train_loss=48.360870361328125; test_loss=173.79469299316406
Epoch480 train_loss=49.3350830078125; test_loss=174.243408203125
Epoch481 train_loss=7.4915924072265625; test_loss=174.82669067382812
Epoch482 train_loss=26.75656509399414; test_loss=176.58206176757812
Epoch483 train_loss=38.02507019042969; test_loss=176.2366180419922
Epoch484 train_loss=126.54496765136719; test_loss=177.16586303710938
Epoch485 train_loss=22.64981460571289; test_loss=176.31683349609375
Epoch486 train_loss=23.802167892456055; test_loss=176.15953063964844
Epoch487 train_loss=371.6445617675781; test_loss=176.13917541503906
Epoch488 train_loss=71.90348052978516; test_loss=175.4840850830078
Epoch489 train_loss=30.37781524658203; test_loss=175.16062927246094
Epoch490 train_loss=12.287586212158203; test_loss=174.386474609375
Epoch491 train_loss=32.03809356689453; test_loss=178.2367401123047
Epoch492 train_loss=131.8901824951172; test_loss=177.28285217285156
Epoch493 train_loss=21.09915542602539; test_loss=179.62416076660156
Epoch494 train_loss=95.9865493774414; test_loss=177.2498779296875
Epoch495 train_loss=13.897895812988281; test_loss=176.77005004882812
Epoch496 train_loss=34.492591857910156; test_loss=174.93478393554688
Epoch497 train_loss=342.1580810546875; test_loss=175.1844482421875
Epoch498 train_loss=124.45417785644531; test_loss=174.54258728027344
Epoch499 train_loss=166.4563751220703; test_loss=177.6175537109375
Epoch500 train_loss=118.14791870117188; test_loss=178.2329559326172
Epoch501 train_loss=20.106647491455078; test_loss=175.2967529296875
Epoch502 train_loss=50.586639404296875; test_loss=177.84945678710938
Epoch503 train_loss=138.6582489013672; test_loss=177.15921020507812
Epoch504 train_loss=25.33502197265625; test_loss=174.58543395996094
Epoch505 train_loss=63.40105438232422; test_loss=175.99810791015625
Epoch506 train_loss=33.93169403076172; test_loss=177.54373168945312
Epoch507 train_loss=126.98731231689453; test_loss=176.58326721191406
Epoch508 train_loss=102.17469787597656; test_loss=180.15982055664062
Epoch509 train_loss=37.8177490234375; test_loss=175.190673828125
Epoch510 train_loss=11.866378784179688; test_loss=173.8881072998047
Epoch511 train_loss=40.91461181640625; test_loss=178.57273864746094
Epoch512 train_loss=29.727996826171875; test_loss=176.05029296875
Epoch513 train_loss=43.45137023925781; test_loss=177.654541015625
Epoch514 train_loss=37.15396499633789; test_loss=176.2687225341797
Epoch515 train_loss=55.15315246582031; test_loss=175.941162109375
Epoch516 train_loss=22.085113525390625; test_loss=178.11251831054688
Epoch517 train_loss=8.678455352783203; test_loss=176.14122009277344
Epoch518 train_loss=73.19779205322266; test_loss=175.6313934326172
Epoch519 train_loss=73.63369750976562; test_loss=176.11322021484375
Epoch520 train_loss=17.554718017578125; test_loss=176.7405548095703
Epoch521 train_loss=27.39586639404297; test_loss=177.89678955078125
Epoch522 train_loss=35.089752197265625; test_loss=179.230224609375
Epoch523 train_loss=50.63670349121094; test_loss=179.40834045410156
Epoch524 train_loss=31.35003662109375; test_loss=176.6932373046875
Epoch525 train_loss=84.21881103515625; test_loss=175.52548217773438
Epoch526 train_loss=52.33378601074219; test_loss=178.19285583496094
Epoch527 train_loss=39.57532501220703; test_loss=174.75413513183594
Epoch528 train_loss=78.18312072753906; test_loss=182.92697143554688
Epoch529 train_loss=50.16670227050781; test_loss=176.91864013671875
Epoch530 train_loss=21.91246795654297; test_loss=176.22845458984375
Epoch531 train_loss=36.831382751464844; test_loss=177.0080108642578
Epoch532 train_loss=26.493759155273438; test_loss=180.4340057373047
Epoch533 train_loss=10.451793670654297; test_loss=177.06869506835938
Epoch534 train_loss=128.17449951171875; test_loss=181.56549072265625
Epoch535 train_loss=35.27262878417969; test_loss=176.0107421875
Epoch536 train_loss=117.38885498046875; test_loss=178.51893615722656
Epoch537 train_loss=122.7394790649414; test_loss=179.42169189453125
Epoch538 train_loss=15.10076904296875; test_loss=176.70872497558594
Epoch539 train_loss=17.699478149414062; test_loss=174.83021545410156
Epoch540 train_loss=87.48622131347656; test_loss=179.8763427734375
Epoch541 train_loss=82.32235717773438; test_loss=176.02830505371094
Epoch542 train_loss=29.396888732910156; test_loss=175.33250427246094
Epoch543 train_loss=64.45785522460938; test_loss=177.53358459472656
Epoch544 train_loss=15.96774673461914; test_loss=176.35122680664062
Epoch545 train_loss=77.40728759765625; test_loss=174.1215057373047
Epoch546 train_loss=89.6740951538086; test_loss=178.76199340820312
Epoch547 train_loss=41.44548034667969; test_loss=180.9471435546875
Epoch548 train_loss=69.86283874511719; test_loss=174.84666442871094
Epoch549 train_loss=3.4036521911621094; test_loss=179.18605041503906
Epoch550 train_loss=53.80788040161133; test_loss=177.17990112304688
Epoch551 train_loss=56.4425048828125; test_loss=179.83078002929688
Epoch552 train_loss=46.25654602050781; test_loss=176.5543670654297
Epoch553 train_loss=121.42350769042969; test_loss=178.8220977783203
Epoch554 train_loss=58.059295654296875; test_loss=179.5970458984375
Epoch555 train_loss=24.600875854492188; test_loss=178.1504669189453
Epoch556 train_loss=43.73036193847656; test_loss=176.3380126953125
Epoch557 train_loss=42.85166549682617; test_loss=177.05528259277344
Epoch558 train_loss=18.613243103027344; test_loss=177.4453887939453
Epoch559 train_loss=148.19784545898438; test_loss=177.7945556640625
Epoch560 train_loss=39.614280700683594; test_loss=180.0534210205078
Epoch561 train_loss=30.712142944335938; test_loss=179.33041381835938
Epoch562 train_loss=47.83537292480469; test_loss=176.7194366455078
Epoch563 train_loss=17.323230743408203; test_loss=180.86207580566406
Epoch564 train_loss=24.6046142578125; test_loss=178.88743591308594
Epoch565 train_loss=9.665363311767578; test_loss=179.13723754882812
Epoch566 train_loss=13.44196891784668; test_loss=181.41717529296875
Epoch567 train_loss=99.36666870117188; test_loss=178.04531860351562
Epoch568 train_loss=10.574630737304688; test_loss=173.85226440429688
Epoch569 train_loss=130.06088256835938; test_loss=178.20877075195312
Epoch570 train_loss=43.910125732421875; test_loss=176.20358276367188
Epoch571 train_loss=105.021240234375; test_loss=178.94752502441406
Epoch572 train_loss=68.92739868164062; test_loss=176.85086059570312
Epoch573 train_loss=24.134292602539062; test_loss=177.82467651367188
Epoch574 train_loss=148.56825256347656; test_loss=178.9824676513672
Epoch575 train_loss=60.26908874511719; test_loss=179.9601287841797
Epoch576 train_loss=102.53303527832031; test_loss=177.39385986328125
Epoch577 train_loss=16.92424774169922; test_loss=177.78970336914062
Epoch578 train_loss=11.124603271484375; test_loss=178.56707763671875
Epoch579 train_loss=57.51811218261719; test_loss=180.77549743652344
Epoch580 train_loss=21.649688720703125; test_loss=180.33631896972656
Epoch581 train_loss=28.134422302246094; test_loss=181.59312438964844
Epoch582 train_loss=90.49729919433594; test_loss=179.4241485595703
Epoch583 train_loss=37.275081634521484; test_loss=178.05636596679688
Epoch584 train_loss=106.92524719238281; test_loss=180.48338317871094
Epoch585 train_loss=44.83830261230469; test_loss=177.77761840820312
Epoch586 train_loss=502.4709777832031; test_loss=180.41053771972656
Epoch587 train_loss=58.686004638671875; test_loss=181.56634521484375
Epoch588 train_loss=13.600578308105469; test_loss=179.20062255859375
Epoch589 train_loss=40.28722381591797; test_loss=177.64833068847656
Epoch590 train_loss=3.2741622924804688; test_loss=179.814697265625
Epoch591 train_loss=117.1087646484375; test_loss=180.48194885253906
Epoch592 train_loss=22.65008544921875; test_loss=178.06619262695312
Epoch593 train_loss=116.24861145019531; test_loss=178.25267028808594
Epoch594 train_loss=5.8939056396484375; test_loss=179.74159240722656
Epoch595 train_loss=13.435226440429688; test_loss=177.3179931640625
Epoch596 train_loss=99.27239227294922; test_loss=181.1459197998047
Epoch597 train_loss=56.7760009765625; test_loss=179.24078369140625
Epoch598 train_loss=39.13218688964844; test_loss=179.42848205566406
Epoch599 train_loss=102.84944152832031; test_loss=179.4885711669922
Epoch600 train_loss=20.994075775146484; test_loss=178.3615264892578
Epoch601 train_loss=104.61325073242188; test_loss=180.74363708496094
Epoch602 train_loss=98.77122497558594; test_loss=179.31625366210938
Epoch603 train_loss=154.9198455810547; test_loss=179.6530303955078
Epoch604 train_loss=13.752750396728516; test_loss=178.5911407470703
Epoch605 train_loss=20.22504425048828; test_loss=177.07235717773438
Epoch606 train_loss=31.17144775390625; test_loss=178.4165802001953
Epoch607 train_loss=59.12248992919922; test_loss=178.8462371826172
Epoch608 train_loss=75.84475708007812; test_loss=176.67340087890625
Epoch609 train_loss=34.399085998535156; test_loss=181.75900268554688
Epoch610 train_loss=19.996654510498047; test_loss=177.4740447998047
Epoch611 train_loss=25.18479347229004; test_loss=179.23104858398438
Epoch612 train_loss=48.096771240234375; test_loss=177.8555145263672
Epoch613 train_loss=9.851661682128906; test_loss=178.49183654785156
Epoch614 train_loss=20.153274536132812; test_loss=179.62350463867188
Epoch615 train_loss=21.825454711914062; test_loss=180.2394256591797
Epoch616 train_loss=11.52804183959961; test_loss=181.48648071289062
Epoch617 train_loss=22.338298797607422; test_loss=179.99154663085938
Epoch618 train_loss=8.522626876831055; test_loss=180.75662231445312
Epoch619 train_loss=46.242164611816406; test_loss=180.2157440185547
Epoch620 train_loss=60.058135986328125; test_loss=178.8498077392578
Epoch621 train_loss=35.12834167480469; test_loss=181.0172882080078
Epoch622 train_loss=23.871604919433594; test_loss=180.80413818359375
Epoch623 train_loss=48.69908142089844; test_loss=179.6772003173828
Epoch624 train_loss=71.457275390625; test_loss=180.52627563476562
Epoch625 train_loss=132.7403564453125; test_loss=179.235595703125
Epoch626 train_loss=29.15428352355957; test_loss=182.8030242919922
Epoch627 train_loss=68.17522430419922; test_loss=181.56097412109375
Epoch628 train_loss=42.18573760986328; test_loss=178.29019165039062
Epoch629 train_loss=29.036834716796875; test_loss=177.8931427001953
Epoch630 train_loss=27.53791046142578; test_loss=179.616455078125
Epoch631 train_loss=19.262290954589844; test_loss=180.941162109375
Epoch632 train_loss=7.978054046630859; test_loss=179.86517333984375
Epoch633 train_loss=71.2423095703125; test_loss=180.66091918945312
Epoch634 train_loss=129.5078125; test_loss=177.0303192138672
Epoch635 train_loss=201.2117919921875; test_loss=179.4096221923828
Epoch636 train_loss=41.948997497558594; test_loss=180.1025848388672
Epoch637 train_loss=4.009761810302734; test_loss=180.98336791992188
Epoch638 train_loss=45.796207427978516; test_loss=180.03334045410156
Epoch639 train_loss=29.121543884277344; test_loss=179.81846618652344
Epoch640 train_loss=22.23711395263672; test_loss=179.40069580078125
Epoch641 train_loss=45.285797119140625; test_loss=182.01345825195312
Epoch642 train_loss=34.0364990234375; test_loss=180.01710510253906
Epoch643 train_loss=37.72361755371094; test_loss=179.45220947265625
Epoch644 train_loss=48.05066680908203; test_loss=178.71450805664062
Epoch645 train_loss=33.33262634277344; test_loss=180.29061889648438
Epoch646 train_loss=58.98408508300781; test_loss=179.079345703125
Epoch647 train_loss=22.78655242919922; test_loss=179.4206085205078
Epoch648 train_loss=31.859466552734375; test_loss=180.2731475830078
Epoch649 train_loss=20.871862411499023; test_loss=178.1663818359375
Epoch650 train_loss=14.614266395568848; test_loss=179.47837829589844
Epoch651 train_loss=38.887718200683594; test_loss=179.50369262695312
Epoch652 train_loss=26.950056076049805; test_loss=181.5589141845703
Epoch653 train_loss=28.851638793945312; test_loss=179.93118286132812
Epoch654 train_loss=496.24786376953125; test_loss=180.32275390625
Epoch655 train_loss=34.32536315917969; test_loss=181.82984924316406
Epoch656 train_loss=35.88492965698242; test_loss=178.6747283935547
Epoch657 train_loss=17.54257583618164; test_loss=180.4169464111328
Epoch658 train_loss=188.6493377685547; test_loss=180.2200164794922
Epoch659 train_loss=67.00502014160156; test_loss=180.52655029296875
Epoch660 train_loss=64.12516784667969; test_loss=180.3157958984375
Epoch661 train_loss=24.08700180053711; test_loss=180.34976196289062
Epoch662 train_loss=26.78470230102539; test_loss=180.31695556640625
Epoch663 train_loss=9.3272705078125; test_loss=181.11578369140625
Epoch664 train_loss=62.35246658325195; test_loss=179.25839233398438
Epoch665 train_loss=53.39305114746094; test_loss=176.613525390625
Epoch666 train_loss=21.713645935058594; test_loss=180.1531219482422
Epoch667 train_loss=47.44390869140625; test_loss=178.25868225097656
Epoch668 train_loss=16.178443908691406; test_loss=179.74522399902344
Epoch669 train_loss=41.764892578125; test_loss=179.92015075683594
Epoch670 train_loss=50.38896179199219; test_loss=179.52650451660156
Epoch671 train_loss=28.568267822265625; test_loss=181.37966918945312
Epoch672 train_loss=13.398284912109375; test_loss=180.6521453857422
Epoch673 train_loss=22.418373107910156; test_loss=179.32833862304688
Epoch674 train_loss=22.954906463623047; test_loss=182.0702362060547
Epoch675 train_loss=55.535423278808594; test_loss=180.0286407470703
Epoch676 train_loss=63.64006805419922; test_loss=179.53228759765625
Epoch677 train_loss=29.478126525878906; test_loss=179.70700073242188
Epoch678 train_loss=37.21464538574219; test_loss=180.5944366455078
Epoch679 train_loss=37.637901306152344; test_loss=180.0443572998047
Epoch680 train_loss=20.927764892578125; test_loss=180.4282684326172
Epoch681 train_loss=47.8309326171875; test_loss=183.25552368164062
Epoch682 train_loss=30.38943099975586; test_loss=181.44342041015625
Epoch683 train_loss=48.30055236816406; test_loss=181.5419921875
Epoch684 train_loss=50.58184814453125; test_loss=180.4508514404297
Epoch685 train_loss=29.761117935180664; test_loss=181.1874237060547
Epoch686 train_loss=149.9688720703125; test_loss=180.36013793945312
Epoch687 train_loss=36.211509704589844; test_loss=179.59616088867188
Epoch688 train_loss=53.34062957763672; test_loss=180.670166015625
Epoch689 train_loss=41.746826171875; test_loss=182.40646362304688
Epoch690 train_loss=33.16265106201172; test_loss=179.95352172851562
Epoch691 train_loss=38.05529022216797; test_loss=181.92947387695312
Epoch692 train_loss=37.95415115356445; test_loss=181.50466918945312
Epoch693 train_loss=50.75968933105469; test_loss=179.07778930664062
Epoch694 train_loss=82.75847625732422; test_loss=180.24095153808594
Epoch695 train_loss=13.369140625; test_loss=179.93869018554688
Epoch696 train_loss=100.94342041015625; test_loss=180.22515869140625
Epoch697 train_loss=9.963043212890625; test_loss=179.24801635742188
Epoch698 train_loss=23.647794723510742; test_loss=179.55026245117188
Epoch699 train_loss=25.76153564453125; test_loss=180.18246459960938
Epoch700 train_loss=38.30876159667969; test_loss=180.30831909179688
Epoch701 train_loss=65.17414855957031; test_loss=181.92105102539062
Epoch702 train_loss=35.140254974365234; test_loss=179.49806213378906
Epoch703 train_loss=28.838024139404297; test_loss=177.98666381835938
Epoch704 train_loss=33.86396408081055; test_loss=180.65432739257812
Epoch705 train_loss=31.802236557006836; test_loss=179.4673309326172
Epoch706 train_loss=199.55274963378906; test_loss=182.44529724121094
Epoch707 train_loss=16.19960594177246; test_loss=179.9009552001953
Epoch708 train_loss=155.62564086914062; test_loss=179.88848876953125
Epoch709 train_loss=20.288978576660156; test_loss=181.11083984375
Epoch710 train_loss=33.32386779785156; test_loss=181.09091186523438
Epoch711 train_loss=40.82609558105469; test_loss=182.2826690673828
Epoch712 train_loss=51.69579315185547; test_loss=181.9573974609375
Epoch713 train_loss=25.8021240234375; test_loss=178.65347290039062
Epoch714 train_loss=43.675628662109375; test_loss=180.7982940673828
Epoch715 train_loss=183.43080139160156; test_loss=179.91310119628906
Epoch716 train_loss=172.48056030273438; test_loss=180.93182373046875
Epoch717 train_loss=109.24237060546875; test_loss=180.39553833007812
Epoch718 train_loss=19.346294403076172; test_loss=180.93087768554688
Epoch719 train_loss=28.245880126953125; test_loss=179.04348754882812
Epoch720 train_loss=25.598873138427734; test_loss=180.40737915039062
Epoch721 train_loss=56.362823486328125; test_loss=178.34530639648438
Epoch722 train_loss=43.059696197509766; test_loss=179.2684783935547
Epoch723 train_loss=56.783050537109375; test_loss=178.7986297607422
Epoch724 train_loss=58.69343185424805; test_loss=181.9120635986328
Epoch725 train_loss=8.406341552734375; test_loss=179.2708740234375
Epoch726 train_loss=13.739959716796875; test_loss=180.62689208984375
Epoch727 train_loss=100.91372680664062; test_loss=179.51170349121094
Epoch728 train_loss=31.797040939331055; test_loss=180.6614990234375
Epoch729 train_loss=91.19268798828125; test_loss=180.71568298339844
Epoch730 train_loss=38.58390808105469; test_loss=179.2659912109375
Epoch731 train_loss=93.01862335205078; test_loss=181.01324462890625
Epoch732 train_loss=25.30931854248047; test_loss=181.5628204345703
Epoch733 train_loss=28.59686279296875; test_loss=180.40066528320312
Epoch734 train_loss=17.020729064941406; test_loss=181.4760284423828
Epoch735 train_loss=27.037185668945312; test_loss=179.05381774902344
Epoch736 train_loss=31.783157348632812; test_loss=179.2003173828125
Epoch737 train_loss=35.514923095703125; test_loss=179.57752990722656
Epoch738 train_loss=22.051063537597656; test_loss=180.33018493652344
Epoch739 train_loss=16.558326721191406; test_loss=180.7402801513672
Epoch740 train_loss=82.06352233886719; test_loss=181.0387420654297
Epoch741 train_loss=27.104633331298828; test_loss=180.74591064453125
Epoch742 train_loss=45.08363342285156; test_loss=179.83547973632812
Epoch743 train_loss=101.41943359375; test_loss=182.27664184570312
Epoch744 train_loss=9.910064697265625; test_loss=181.57177734375
Epoch745 train_loss=35.83551025390625; test_loss=179.51991271972656
Epoch746 train_loss=36.173980712890625; test_loss=179.6739044189453
Epoch747 train_loss=18.51300048828125; test_loss=182.41708374023438
Epoch748 train_loss=28.69432830810547; test_loss=182.7227783203125
Epoch749 train_loss=90.98785400390625; test_loss=180.44467163085938
Epoch750 train_loss=49.77885437011719; test_loss=181.07449340820312
Epoch751 train_loss=50.59523010253906; test_loss=181.66139221191406
Epoch752 train_loss=8.586894989013672; test_loss=181.3877716064453
Epoch753 train_loss=63.25159454345703; test_loss=181.46221923828125
Epoch754 train_loss=15.565410614013672; test_loss=178.69619750976562
Epoch755 train_loss=94.98011016845703; test_loss=179.28746032714844
Epoch756 train_loss=68.34304809570312; test_loss=179.5840301513672
Epoch757 train_loss=27.810760498046875; test_loss=181.31190490722656
Epoch758 train_loss=22.80937957763672; test_loss=181.56137084960938
Epoch759 train_loss=40.19200897216797; test_loss=181.02537536621094
Epoch760 train_loss=41.6751708984375; test_loss=181.46910095214844
Epoch761 train_loss=29.29883575439453; test_loss=181.69862365722656
Epoch762 train_loss=15.262359619140625; test_loss=181.67591857910156
Epoch763 train_loss=19.872356414794922; test_loss=180.4281463623047
Epoch764 train_loss=142.7845458984375; test_loss=177.8596649169922
Epoch765 train_loss=11.288932800292969; test_loss=180.33807373046875
Epoch766 train_loss=66.29197692871094; test_loss=180.5549774169922
Epoch767 train_loss=52.024932861328125; test_loss=182.89437866210938
Epoch768 train_loss=25.874759674072266; test_loss=181.5767364501953
Epoch769 train_loss=30.896347045898438; test_loss=182.5895538330078
Epoch770 train_loss=68.60565948486328; test_loss=184.4586944580078
Epoch771 train_loss=23.6402587890625; test_loss=183.2655487060547
Epoch772 train_loss=286.896240234375; test_loss=181.11337280273438
Epoch773 train_loss=30.985031127929688; test_loss=183.55931091308594
Epoch774 train_loss=63.34104919433594; test_loss=182.62362670898438
Epoch775 train_loss=32.21418762207031; test_loss=182.8715057373047
Epoch776 train_loss=11.092994689941406; test_loss=181.0697784423828
Epoch777 train_loss=39.48627471923828; test_loss=183.1483612060547
Epoch778 train_loss=82.29127502441406; test_loss=181.77420043945312
Epoch779 train_loss=64.13116455078125; test_loss=181.13340759277344
Epoch780 train_loss=51.051116943359375; test_loss=179.97886657714844
Epoch781 train_loss=41.84728240966797; test_loss=183.45480346679688
Epoch782 train_loss=79.68231201171875; test_loss=182.54136657714844
Epoch783 train_loss=50.33333206176758; test_loss=182.03704833984375
Epoch784 train_loss=145.73477172851562; test_loss=183.19857788085938
Epoch785 train_loss=35.122467041015625; test_loss=182.15194702148438
Epoch786 train_loss=19.441364288330078; test_loss=181.8913116455078
Epoch787 train_loss=60.64277648925781; test_loss=180.67257690429688
Epoch788 train_loss=67.84104919433594; test_loss=182.0056915283203
Epoch789 train_loss=26.96765899658203; test_loss=181.46705627441406
Epoch790 train_loss=73.7476806640625; test_loss=181.3175506591797
Epoch791 train_loss=24.129505157470703; test_loss=182.02394104003906
Epoch792 train_loss=33.51995849609375; test_loss=181.0866241455078
Epoch793 train_loss=10.194656372070312; test_loss=180.35777282714844
Epoch794 train_loss=39.17649841308594; test_loss=183.90464782714844
Epoch795 train_loss=25.458953857421875; test_loss=182.60598754882812
Epoch796 train_loss=28.503698348999023; test_loss=180.72415161132812
Epoch797 train_loss=74.30625915527344; test_loss=181.63543701171875
Epoch798 train_loss=38.09220886230469; test_loss=181.87266540527344
Epoch799 train_loss=32.91114044189453; test_loss=183.45721435546875
Epoch800 train_loss=20.94775390625; test_loss=181.84657287597656
Epoch801 train_loss=32.519500732421875; test_loss=183.3133544921875
Epoch802 train_loss=84.235107421875; test_loss=183.30528259277344
Epoch803 train_loss=9.678749084472656; test_loss=182.3968048095703
Epoch804 train_loss=41.34156799316406; test_loss=179.82754516601562
Epoch805 train_loss=18.171112060546875; test_loss=180.96055603027344
Epoch806 train_loss=30.042734146118164; test_loss=180.65570068359375
Epoch807 train_loss=52.452884674072266; test_loss=181.8223419189453
Epoch808 train_loss=30.488977432250977; test_loss=180.68133544921875
Epoch809 train_loss=324.5740966796875; test_loss=183.45606994628906
Epoch810 train_loss=8.557624816894531; test_loss=181.753662109375
Epoch811 train_loss=29.16961669921875; test_loss=182.61851501464844
Epoch812 train_loss=37.730743408203125; test_loss=182.0754852294922
Epoch813 train_loss=6.5333404541015625; test_loss=179.602783203125
Epoch814 train_loss=28.193721771240234; test_loss=180.98818969726562
Epoch815 train_loss=52.543731689453125; test_loss=182.45455932617188
Epoch816 train_loss=77.39108276367188; test_loss=184.41184997558594
Epoch817 train_loss=6.322620391845703; test_loss=183.49261474609375
Epoch818 train_loss=8.226287841796875; test_loss=181.18377685546875
Epoch819 train_loss=54.3370361328125; test_loss=181.1989288330078
Epoch820 train_loss=20.047943115234375; test_loss=181.00680541992188
Epoch821 train_loss=84.67640686035156; test_loss=182.6477813720703
Epoch822 train_loss=39.34449005126953; test_loss=182.65199279785156
Epoch823 train_loss=38.037506103515625; test_loss=181.63841247558594
Epoch824 train_loss=13.784421920776367; test_loss=180.8751220703125
Epoch825 train_loss=47.18489074707031; test_loss=180.0585479736328
Epoch826 train_loss=30.298141479492188; test_loss=180.03607177734375
Epoch827 train_loss=16.884140014648438; test_loss=181.7086181640625
Epoch828 train_loss=37.64410400390625; test_loss=181.8359832763672
Epoch829 train_loss=81.2386703491211; test_loss=179.97738647460938
Epoch830 train_loss=17.668704986572266; test_loss=180.298095703125
Epoch831 train_loss=23.5445556640625; test_loss=183.1582794189453
Epoch832 train_loss=26.808513641357422; test_loss=181.67083740234375
Epoch833 train_loss=36.63262939453125; test_loss=184.56573486328125
Epoch834 train_loss=71.65039825439453; test_loss=183.79969787597656
Epoch835 train_loss=19.00589370727539; test_loss=181.50108337402344
Epoch836 train_loss=15.794235229492188; test_loss=181.84339904785156
Epoch837 train_loss=63.75889587402344; test_loss=183.05686950683594
Epoch838 train_loss=44.89642333984375; test_loss=181.91575622558594
Epoch839 train_loss=57.76239013671875; test_loss=180.43417358398438
Epoch840 train_loss=48.87974548339844; test_loss=180.755859375
Epoch841 train_loss=42.54197692871094; test_loss=183.12063598632812
Epoch842 train_loss=48.233306884765625; test_loss=179.9999237060547
Epoch843 train_loss=41.344482421875; test_loss=179.85498046875
Epoch844 train_loss=13.938652038574219; test_loss=181.70298767089844
Epoch845 train_loss=65.92987060546875; test_loss=180.3446807861328
Epoch846 train_loss=68.80152893066406; test_loss=181.22640991210938
Epoch847 train_loss=67.39254760742188; test_loss=183.27069091796875
Epoch848 train_loss=16.784461975097656; test_loss=181.4152374267578
Epoch849 train_loss=34.50538635253906; test_loss=181.3732147216797
Epoch850 train_loss=50.216400146484375; test_loss=181.2099609375
Epoch851 train_loss=32.7360954284668; test_loss=180.5021209716797
Epoch852 train_loss=41.8597412109375; test_loss=181.95420837402344
Epoch853 train_loss=15.746997833251953; test_loss=180.68959045410156
Epoch854 train_loss=29.810802459716797; test_loss=180.7416229248047
Epoch855 train_loss=143.82374572753906; test_loss=182.78668212890625
Epoch856 train_loss=51.39887237548828; test_loss=181.01707458496094
Epoch857 train_loss=255.58233642578125; test_loss=183.4429931640625
Epoch858 train_loss=26.34075164794922; test_loss=181.8600616455078
Epoch859 train_loss=11.63174057006836; test_loss=184.09051513671875
Epoch860 train_loss=15.288307189941406; test_loss=183.73423767089844
Epoch861 train_loss=5.938514709472656; test_loss=181.53512573242188
Epoch862 train_loss=28.371078491210938; test_loss=181.5321807861328
Epoch863 train_loss=12.032516479492188; test_loss=182.7146759033203
Epoch864 train_loss=115.27310180664062; test_loss=180.99063110351562
Epoch865 train_loss=24.550079345703125; test_loss=180.36305236816406
Epoch866 train_loss=12.237336158752441; test_loss=182.64126586914062
Epoch867 train_loss=18.78738021850586; test_loss=179.24168395996094
Epoch868 train_loss=57.6351203918457; test_loss=181.44815063476562
Epoch869 train_loss=20.626327514648438; test_loss=181.7119598388672
Epoch870 train_loss=8.497062683105469; test_loss=182.11460876464844
Epoch871 train_loss=9.504188537597656; test_loss=183.65003967285156
Epoch872 train_loss=18.045944213867188; test_loss=181.05599975585938
Epoch873 train_loss=17.568225860595703; test_loss=180.87631225585938
Epoch874 train_loss=37.809532165527344; test_loss=183.00587463378906
Epoch875 train_loss=27.96654510498047; test_loss=183.26641845703125
Epoch876 train_loss=88.86166381835938; test_loss=181.6691436767578
Epoch877 train_loss=27.386930465698242; test_loss=181.23004150390625
Epoch878 train_loss=20.696258544921875; test_loss=179.23046875
Epoch879 train_loss=33.996952056884766; test_loss=181.04830932617188
Epoch880 train_loss=13.402374267578125; test_loss=180.35879516601562
Epoch881 train_loss=65.96980285644531; test_loss=180.21713256835938
Epoch882 train_loss=187.49658203125; test_loss=182.65402221679688
Epoch883 train_loss=24.763561248779297; test_loss=180.32688903808594
Epoch884 train_loss=37.1630973815918; test_loss=180.8330078125
Epoch885 train_loss=17.427093505859375; test_loss=182.93209838867188
Epoch886 train_loss=43.3507080078125; test_loss=181.88731384277344
Epoch887 train_loss=42.087554931640625; test_loss=181.90768432617188
Epoch888 train_loss=10.084075927734375; test_loss=183.49490356445312
Epoch889 train_loss=29.487060546875; test_loss=181.5633087158203
Epoch890 train_loss=26.508155822753906; test_loss=181.9501190185547
Epoch891 train_loss=8.35101318359375; test_loss=182.0661163330078
Epoch892 train_loss=56.20399856567383; test_loss=181.96083068847656
Epoch893 train_loss=15.042144775390625; test_loss=183.40260314941406
Epoch894 train_loss=15.156143188476562; test_loss=182.90411376953125
Epoch895 train_loss=23.729736328125; test_loss=181.18077087402344
Epoch896 train_loss=24.82982635498047; test_loss=183.22634887695312
Epoch897 train_loss=19.90143585205078; test_loss=183.5799560546875
Epoch898 train_loss=22.181167602539062; test_loss=180.12583923339844
Epoch899 train_loss=25.696380615234375; test_loss=181.9327850341797
Epoch900 train_loss=237.16543579101562; test_loss=184.02218627929688
Epoch901 train_loss=37.16730499267578; test_loss=185.10348510742188
Epoch902 train_loss=65.20610046386719; test_loss=182.8196258544922
Epoch903 train_loss=68.7621078491211; test_loss=182.40077209472656
Epoch904 train_loss=50.5511474609375; test_loss=182.91482543945312
Epoch905 train_loss=15.151519775390625; test_loss=181.5933074951172
Epoch906 train_loss=103.6036376953125; test_loss=182.9541015625
Epoch907 train_loss=41.522239685058594; test_loss=182.37237548828125
Epoch908 train_loss=26.166322708129883; test_loss=184.6638946533203
Epoch909 train_loss=71.27238464355469; test_loss=183.178955078125
Epoch910 train_loss=31.508621215820312; test_loss=183.32972717285156
Epoch911 train_loss=19.26111602783203; test_loss=181.46005249023438
Epoch912 train_loss=11.098777770996094; test_loss=181.33250427246094
Epoch913 train_loss=100.42472839355469; test_loss=182.58631896972656
Epoch914 train_loss=22.749099731445312; test_loss=180.8668670654297
Epoch915 train_loss=29.976423263549805; test_loss=183.1990509033203
Epoch916 train_loss=44.131622314453125; test_loss=183.19264221191406
Epoch917 train_loss=56.939208984375; test_loss=182.09164428710938
Epoch918 train_loss=102.16087341308594; test_loss=182.8767852783203
Epoch919 train_loss=34.28094482421875; test_loss=182.920654296875
Epoch920 train_loss=9.631214141845703; test_loss=182.74661254882812
Epoch921 train_loss=47.99931335449219; test_loss=183.37924194335938
Epoch922 train_loss=39.519500732421875; test_loss=183.43138122558594
Epoch923 train_loss=5.643993377685547; test_loss=181.6475067138672
Epoch924 train_loss=39.08909606933594; test_loss=181.31788635253906
Epoch925 train_loss=20.845870971679688; test_loss=182.90771484375
Epoch926 train_loss=16.3916015625; test_loss=181.9959259033203
Epoch927 train_loss=22.105789184570312; test_loss=183.97898864746094
Epoch928 train_loss=18.301971435546875; test_loss=183.42041015625
Epoch929 train_loss=42.439979553222656; test_loss=183.55772399902344
Epoch930 train_loss=165.36746215820312; test_loss=181.56271362304688
Epoch931 train_loss=30.245574951171875; test_loss=183.1807098388672
Epoch932 train_loss=18.560531616210938; test_loss=182.9733123779297
Epoch933 train_loss=43.40848922729492; test_loss=182.94879150390625
Epoch934 train_loss=28.226776123046875; test_loss=183.45266723632812
Epoch935 train_loss=12.772136688232422; test_loss=183.05703735351562
Epoch936 train_loss=43.523719787597656; test_loss=183.69078063964844
Epoch937 train_loss=27.858413696289062; test_loss=185.45899963378906
Epoch938 train_loss=63.30756378173828; test_loss=182.80055236816406
Epoch939 train_loss=37.4042854309082; test_loss=182.73487854003906
Epoch940 train_loss=46.39252471923828; test_loss=184.67713928222656
Epoch941 train_loss=50.32411193847656; test_loss=182.7235107421875
Epoch942 train_loss=68.49283599853516; test_loss=184.73736572265625
Epoch943 train_loss=27.56939697265625; test_loss=183.55410766601562
Epoch944 train_loss=28.466415405273438; test_loss=182.6914520263672
Epoch945 train_loss=14.92129898071289; test_loss=184.18846130371094
Epoch946 train_loss=41.242095947265625; test_loss=183.51876831054688
Epoch947 train_loss=61.50617980957031; test_loss=184.09042358398438
Epoch948 train_loss=20.868881225585938; test_loss=182.26785278320312
Epoch949 train_loss=70.09915924072266; test_loss=183.43478393554688
Epoch950 train_loss=22.090316772460938; test_loss=180.3603973388672
Epoch951 train_loss=29.629119873046875; test_loss=182.32191467285156
Epoch952 train_loss=43.589080810546875; test_loss=183.0729217529297
Epoch953 train_loss=35.911865234375; test_loss=182.9369659423828
Epoch954 train_loss=51.710113525390625; test_loss=183.28724670410156
Epoch955 train_loss=40.23988342285156; test_loss=182.1758270263672
Epoch956 train_loss=54.942684173583984; test_loss=184.3721466064453
Epoch957 train_loss=15.468067169189453; test_loss=185.19924926757812
Epoch958 train_loss=30.763404846191406; test_loss=184.1902618408203
Epoch959 train_loss=60.899070739746094; test_loss=183.5238494873047
Epoch960 train_loss=52.05125427246094; test_loss=180.97738647460938
Epoch961 train_loss=38.925262451171875; test_loss=181.63267517089844
Epoch962 train_loss=24.967979431152344; test_loss=182.54124450683594
Epoch963 train_loss=66.67701721191406; test_loss=181.57569885253906
Epoch964 train_loss=14.87054443359375; test_loss=183.18099975585938
Epoch965 train_loss=23.238815307617188; test_loss=182.73898315429688
Epoch966 train_loss=7.874370574951172; test_loss=182.76190185546875
Epoch967 train_loss=19.096580505371094; test_loss=182.2040252685547
Epoch968 train_loss=43.625518798828125; test_loss=183.1695556640625
Epoch969 train_loss=24.984512329101562; test_loss=182.4524383544922
Epoch970 train_loss=39.15217590332031; test_loss=179.7207794189453
Epoch971 train_loss=49.630340576171875; test_loss=182.84930419921875
Epoch972 train_loss=36.48674011230469; test_loss=181.0131072998047
Epoch973 train_loss=14.923503875732422; test_loss=180.76251220703125
Epoch974 train_loss=53.697418212890625; test_loss=182.7642364501953
Epoch975 train_loss=31.677127838134766; test_loss=182.59925842285156
Epoch976 train_loss=27.053634643554688; test_loss=180.70831298828125
Epoch977 train_loss=32.24394226074219; test_loss=181.58837890625
Epoch978 train_loss=25.90291976928711; test_loss=183.43919372558594
Epoch979 train_loss=31.30669403076172; test_loss=184.92999267578125
Epoch980 train_loss=55.904144287109375; test_loss=181.86260986328125
Epoch981 train_loss=20.954666137695312; test_loss=184.3264617919922
Epoch982 train_loss=30.844635009765625; test_loss=183.7881622314453
Epoch983 train_loss=26.07171630859375; test_loss=183.059814453125
Epoch984 train_loss=21.255443572998047; test_loss=182.6802215576172
Epoch985 train_loss=54.83941650390625; test_loss=183.89552307128906
Epoch986 train_loss=33.63173294067383; test_loss=181.81893920898438
Epoch987 train_loss=26.190427780151367; test_loss=180.75491333007812
Epoch988 train_loss=15.43994140625; test_loss=183.12762451171875
Epoch989 train_loss=13.978839874267578; test_loss=182.48631286621094
Epoch990 train_loss=40.89390563964844; test_loss=183.41566467285156
Epoch991 train_loss=41.54765319824219; test_loss=182.98472595214844
Epoch992 train_loss=40.46238708496094; test_loss=181.7471466064453
Epoch993 train_loss=24.006114959716797; test_loss=182.49716186523438
Epoch994 train_loss=46.65782165527344; test_loss=184.70433044433594
Epoch995 train_loss=45.113319396972656; test_loss=182.28065490722656
Epoch996 train_loss=19.480072021484375; test_loss=184.4874725341797
Epoch997 train_loss=31.572830200195312; test_loss=182.58547973632812
Epoch998 train_loss=10.078598022460938; test_loss=184.51307678222656
Epoch999 train_loss=45.804222106933594; test_loss=185.54833984375
Epoch1000 train_loss=34.97251510620117; test_loss=183.36358642578125
------------------------------------------- TRAINING SCORES -------------------------------------------
Overall MAE: 79.71059091377258 +/- 84.49931945965058
Overall RMSE: 111.38372660827636 +/- 131.9757459665678
Overall MAPE: 9.910053818970919 +/- 15.004621771562721
Overall R2: 81.50300378203393 +/- 111.02166223520635
------------------------------------------- TESTING SCORES -------------------------------------------
Overall MAE: 173.95032133483886 +/- 8.368138305017702
Overall RMSE: 286.0597731628418 +/- 12.17003063101541
Overall MAPE: 22.35633151245117 +/- 0.932604751653328
Overall R2: 82.89831009507179 +/- 1.431324887029879
Wall time: 9min 47s
%%time
# here are the best parameters for the PCA hyperparameter search:
# lr: 0.0010009502114520618
# batch_size: 16
# n_layers: 3
# neurons_HL1: 1024
# HL0_ac_fn: relu
# HL1_ac_fn: relu
# HL2_ac_fn: relu
# Sequential(
# (0): Linear(in_features=25, out_features=1024, bias=True)
# (1): ReLU()
# (2): Linear(in_features=1024, out_features=512, bias=True)
# (3): ReLU()
# (4): Linear(in_features=512, out_features=256, bias=True)
# (5): ReLU()
# (6): Linear(in_features=256, out_features=1, bias=True)
# )
metrics = [RootMeanSquaredError(), "mean_absolute_percentage_error",
"mean_absolute_error"]
num_folds = 10
kfold = KFold(n_splits=10, shuffle=True, random_state=1)
# we define the cross validator, and other variables:
lr = 0.0010009502114520618
batch_size = 16
num_epochs = 100
num_folds = 10
kfold = KFold(n_splits=num_folds, shuffle=True, random_state=1)
fold_num = 1
MAPE_train = []
MAPE_scores = []
RMSE_train = []
RMSE_scores = []
MAE_train = []
MAE_scores = []
R2_train = []
R2_scores = []
low_MAPE_scores = []
low_RMSE_scores = []
low_MAE_scores = []
low_R2_scores = []
med_MAPE_scores = []
med_RMSE_scores = []
med_MAE_scores = []
med_R2_scores = []
high_MAPE_scores = []
high_RMSE_scores = []
high_MAE_scores = []
high_R2_scores = []
# we convert PCA_X and Y into tensors for pytorch:
# torch_X, torch_Y = torch.from_numpy(PCA_X).float(), torch.reshape(torch.from_numpy(Y).float(), (6746, 1))
for train, test in kfold.split(PCA_X, Y):
# now our data is ready to go into our model.
model = Sequential([
Dense(1024, activation='relu', input_shape=(PCA_X.shape[1],)),
Dense(512, activation='relu'),
Dense(256, activation='relu'),
Dense(1, activation='linear')
])
model.compile(loss='mae', optimizer=Adam(learning_rate=lr), metrics=metrics)
history = model.fit(PCA_X[train], Y[train], batch_size=16, epochs=100, verbose=False)
scores = model.evaluate(PCA_X[test], Y[test], verbose=False)
# we split the Y[test] into 3 inclusive volumes: low (0-299), medium (300-750), high (>= 751)
# here test and Y[test] are numpy ndarrays
# test has shape (675,), Y[test] has size around (675,45)
# Y[test][0] is the same as calling Y[test[0]] which has shape (45,)
# print("test.shape", test.shape)
# print(test)
# print("Y[test].shape",Y[test].shape)
# print(Y[test])
low_Y_test_mask = np.all([Y[test] >= 0, Y[test] <=299], axis=0)
med_Y_test_mask = np.all([Y[test] >= 300, Y[test] <=750], axis=0)
high_Y_test_mask = np.all([Y[test] >= 751],axis=0)
# print(low_Y_test_mask.shape)
# print(low_Y_test_mask)
# print("Y[test][low_Y_test_mask].shape",Y[test][low_Y_test_mask].shape)
# print(Y[test][low_Y_test_mask])
# print(med_Y_test_mask.shape)
# print(med_Y_test_mask)
# print("Y[test][med_Y_test_mask].shape",Y[test][med_Y_test_mask].shape)
# print(Y[test][med_Y_test_mask])
# print(high_Y_test_mask.shape)
# print(high_Y_test_mask)
# print("Y[test][high_Y_test_mask].shape",Y[test][high_Y_test_mask].shape)
# print(Y[test][high_Y_test_mask])
low_scores = model.evaluate(PCA_X[test][low_Y_test_mask], Y[test][low_Y_test_mask], verbose=False)
med_scores = model.evaluate(PCA_X[test][med_Y_test_mask], Y[test][med_Y_test_mask], verbose=False)
high_scores = model.evaluate(PCA_X[test][high_Y_test_mask], Y[test][high_Y_test_mask], verbose=False)
low_MAPE_scores.append(low_scores[2])
low_RMSE_scores.append(low_scores[1])
low_MAE_scores.append(low_scores[0])
low_R2_scores.append(r2_score(Y[test][low_Y_test_mask], model.predict(PCA_X[test][low_Y_test_mask])))
med_MAPE_scores.append(med_scores[2])
med_RMSE_scores.append(med_scores[1])
med_MAE_scores.append(med_scores[0])
med_R2_scores.append(r2_score(Y[test][med_Y_test_mask], model.predict(PCA_X[test][med_Y_test_mask])))
high_MAPE_scores.append(high_scores[2])
high_RMSE_scores.append(high_scores[1])
high_MAE_scores.append(high_scores[0])
high_R2_scores.append(r2_score(Y[test][high_Y_test_mask], model.predict(PCA_X[test][high_Y_test_mask])))
train_r2 = r2_score(Y[train], model.predict(PCA_X[train]))
R2_train.append(train_r2)
score_r2 = r2_score(Y[test], model.predict(PCA_X[test]))
R2_scores.append(score_r2)
MAPE_train.append(history.history.get(
'mean_absolute_percentage_error')[-1])
MAPE_scores.append(scores[2])
RMSE_train.append(history.history.get('root_mean_squared_error')[-1])
RMSE_scores.append(scores[1])
MAE_train.append(history.history.get('loss')[-1])
MAE_scores.append(scores[0])
print(
f"------------------------------------------Fold {fold_num}------------------------------------------")
print(
f"\tTrain Loss: {history.history.get('loss')[-1]}\tScore Loss: {scores[0]}")
print(
f"\tTrain RMSE: {history.history.get('root_mean_squared_error')[-1]}\tScore RMSE: {scores[1]}")
print(
f"\tTrain MAPE: {history.history.get('mean_absolute_percentage_error')[-1]}\tScore MAPE: {scores[2]}")
if scores[2] == 100.0:
print(history.history.get('mean_absolute_percentage_error'))
print(f"\tTrain R2: {train_r2}\tScore R2: {score_r2}")
print(f"\n\t3-VOLUME SCORES: LOW (n={Y[test][low_Y_test_mask].shape[0]}), MED (n={Y[test][med_Y_test_mask].shape[0]}), HIGH (n={Y[test][high_Y_test_mask].shape[0]}), TOTAL IN FOLD (n={test.shape[0]})")
print(f"\tLow Loss: {low_MAE_scores[-1]}\tMed Loss: {med_MAE_scores[-1]}\tHigh Loss: {high_MAE_scores[-1]}")
print(f"\tLow RMSE: {low_RMSE_scores[-1]}\tMed RMSE: {med_RMSE_scores[-1]}\tHigh RMSE: {high_RMSE_scores[-1]}")
print(f"\tLow MAPE: {low_MAPE_scores[-1]}\tMed MAPE: {med_MAPE_scores[-1]}\tHigh MAPE: {high_MAPE_scores[-1]}")
print(f"\tLow R2: {low_R2_scores[-1]}\tMed R2: {med_R2_scores[-1]}\tHigh R2: {high_R2_scores[-1]}")
fold_num += 1
print(f"\n\nLow MAE Average: {np.mean(low_MAE_scores)} +/- {np.std(low_MAE_scores)}")
print(f"Low RMSE Average: {np.mean(low_RMSE_scores)} +/- {np.std(low_RMSE_scores)}")
print(f"Low MAPE Average: {np.mean(low_MAPE_scores)} +/- {np.std(low_MAPE_scores)}")
print(f"Low R2 Average: {np.mean(low_R2_scores)*100} +/- {np.std(low_R2_scores)*100}\n")
print(f"\nMed MAE Average: {np.mean(med_MAE_scores)} +/- {np.std(med_MAE_scores)}")
print(f"Med RMSE Average: {np.mean(med_RMSE_scores)} +/- {np.std(med_RMSE_scores)}")
print(f"Med MAPE Average: {np.mean(med_MAPE_scores)} +/- {np.std(med_MAPE_scores)}")
print(f"Med R2 Average: {np.mean(med_R2_scores)*100} +/- {np.std(med_R2_scores)*100}\n")
print(f"\nHigh MAE Average: {np.mean(high_MAE_scores)} +/- {np.std(high_MAE_scores)}")
print(f"High RMSE Average: {np.mean(high_RMSE_scores)} +/- {np.std(high_RMSE_scores)}")
print(f"High MAPE Average: {np.mean(high_MAPE_scores)} +/- {np.std(high_MAPE_scores)}")
print(f"High R2 Average: {np.mean(high_R2_scores)*100} +/- {np.std(high_R2_scores)*100}\n")
print(f"\n\nOverall MAE: {np.mean(MAE_scores)} +/- {np.std(MAE_scores)}")
print(f"Overall RMSE: {np.mean(RMSE_scores)} +/- {np.std(RMSE_scores)}")
print(f"Overall MAPE: {np.mean(MAPE_scores)} +/- {np.std(MAPE_scores)}")
print(f"Overall R2: {np.mean(R2_scores)*100} +/- {np.std(R2_scores)*100}\n\n")
print("Delimited table:")
print("MAPE\tRMSE\tMAE\tR2")
for i in range(0, 10):
print("%.2f/%.2f\t%.2f/%.2f\t%.2f/%.2f\t%.2f/%.2f" %
(MAPE_train[i], MAPE_scores[i], RMSE_train[i], RMSE_scores[i], MAE_train[i], MAE_scores[i], R2_train[i], R2_scores[i]))
print("%.2f/%.2f\t%.2f/%.2f\t%.2f/%.2f\t%.2f/%.2f\t<===Averages" % (np.mean(MAPE_train), np.mean(MAPE_scores),
np.mean(RMSE_train), np.mean(RMSE_scores), np.mean(MAE_train), np.mean(MAE_scores), np.mean(R2_train), np.mean(R2_scores)))
------------------------------------------Fold 1------------------------------------------ Train Loss: 127.81013488769531 Score Loss: 154.7947998046875 Train RMSE: 228.13241577148438 Score RMSE: 260.9296569824219 Train MAPE: 15.592364311218262 Score MAPE: 23.83774185180664 Train R2: 0.8997540280663398 Score R2: 0.8655906649950476 3-VOLUME SCORES: LOW (n=88), MED (n=178), HIGH (n=409), TOTAL IN FOLD (n=675) Low Loss: 96.77949523925781 Med Loss: 151.1336212158203 High Loss: 168.87066650390625 Low RMSE: 220.55447387695312 Med RMSE: 282.9859313964844 High RMSE: 258.9322204589844 Low MAPE: 69.3411865234375 Med MAPE: 27.834455490112305 High MAPE: 12.307868957519531 Low R2: -9.2200174226078 Med R2: -4.155332412092903 High R2: 0.7979124649029696 ------------------------------------------Fold 2------------------------------------------ Train Loss: 131.28802490234375 Score Loss: 137.11680603027344 Train RMSE: 236.45433044433594 Score RMSE: 212.70245361328125 Train MAPE: 16.429386138916016 Score MAPE: 20.399553298950195 Train R2: 0.8957725689896933 Score R2: 0.9040436124076208 3-VOLUME SCORES: LOW (n=94), MED (n=166), HIGH (n=415), TOTAL IN FOLD (n=675) Low Loss: 45.35829544067383 Med Loss: 136.8745880126953 High Loss: 157.99752807617188 Low RMSE: 72.359130859375 Med RMSE: 220.00363159179688 High RMSE: 230.30487060546875 Low MAPE: 47.15357971191406 Med MAPE: 27.075597763061523 High MAPE: 11.66918659210205 Low R2: -0.08106707352635678 Med R2: -1.8328400534362328 High R2: 0.819505383238484 ------------------------------------------Fold 3------------------------------------------ Train Loss: 127.7142333984375 Score Loss: 154.29220581054688 Train RMSE: 229.74073791503906 Score RMSE: 263.18243408203125 Train MAPE: 16.00261688232422 Score MAPE: 21.070749282836914 Train R2: 0.8987809723841674 Score R2: 0.8702570829364562 3-VOLUME SCORES: LOW (n=100), MED (n=159), HIGH (n=416), TOTAL IN FOLD (n=675) Low Loss: 62.500091552734375 Med Loss: 149.18319702148438 High Loss: 
178.3103790283203 Low RMSE: 130.94033813476562 Med RMSE: 297.3729248046875 High RMSE: 272.88897705078125 Low MAPE: 43.12859344482422 Med MAPE: 29.70044708251953 High MAPE: 12.470012664794922 Low R2: -3.03775822172389 Med R2: -4.201401927967132 High R2: 0.7864924323390385 ------------------------------------------Fold 4------------------------------------------ Train Loss: 130.9394073486328 Score Loss: 152.30174255371094 Train RMSE: 231.63134765625 Score RMSE: 258.0766906738281 Train MAPE: 16.633272171020508 Score MAPE: 25.981096267700195 Train R2: 0.8915152056578826 Score R2: 0.8623806723364293 3-VOLUME SCORES: LOW (n=98), MED (n=164), HIGH (n=413), TOTAL IN FOLD (n=675) Low Loss: 69.88553619384766 Med Loss: 130.81964111328125 High Loss: 180.3885498046875 Low RMSE: 162.71002197265625 Med RMSE: 276.5401611328125 High RMSE: 268.71185302734375 Low MAPE: 79.91934967041016 Med MAPE: 24.730491638183594 High MAPE: 13.678803443908691 Low R2: -4.538366241694128 Med R2: -4.746133667188896 High R2: 0.7553667170325372 ------------------------------------------Fold 5------------------------------------------ Train Loss: 129.73695373535156 Score Loss: 176.1621551513672 Train RMSE: 229.5774383544922 Score RMSE: 290.10552978515625 Train MAPE: 16.4157772064209 Score MAPE: 23.34259033203125 Train R2: 0.8973933788682711 Score R2: 0.836986666859763 3-VOLUME SCORES: LOW (n=107), MED (n=147), HIGH (n=421), TOTAL IN FOLD (n=675) Low Loss: 65.00853729248047 Med Loss: 143.42543029785156 High Loss: 215.84320068359375 Low RMSE: 155.13783264160156 Med RMSE: 272.5326232910156 High RMSE: 320.7595520019531 Low MAPE: 44.61845016479492 Med MAPE: 29.090774536132812 High MAPE: 15.928099632263184 Low R2: -4.438510787874399 Med R2: -3.243643284961733 High R2: 0.6980457784544142 ------------------------------------------Fold 6------------------------------------------ Train Loss: 130.02989196777344 Score Loss: 148.5319366455078 Train RMSE: 230.93060302734375 Score RMSE: 253.60423278808594 Train MAPE: 
16.465152740478516 Score MAPE: 19.8250789642334 Train R2: 0.8996410113608634 Score R2: 0.8627133538149295 3-VOLUME SCORES: LOW (n=101), MED (n=164), HIGH (n=410), TOTAL IN FOLD (n=675) Low Loss: 57.14979934692383 Med Loss: 115.82070922851562 High Loss: 184.12765502929688 Low RMSE: 105.84821319580078 Med RMSE: 225.80239868164062 High RMSE: 287.6282043457031 Low MAPE: 38.56513214111328 Med MAPE: 22.827890396118164 High MAPE: 14.007501602172852 Low R2: -1.8816983468744453 Med R2: -2.1812217083667385 High R2: 0.7351292903979412 ------------------------------------------Fold 7------------------------------------------ Train Loss: 130.3003692626953 Score Loss: 159.8907470703125 Train RMSE: 231.0568084716797 Score RMSE: 273.3282165527344 Train MAPE: 16.445106506347656 Score MAPE: 19.791595458984375 Train R2: 0.9016465838910616 Score R2: 0.8508562427258288 3-VOLUME SCORES: LOW (n=87), MED (n=155), HIGH (n=432), TOTAL IN FOLD (n=674) Low Loss: 58.31989288330078 Med Loss: 185.43472290039062 High Loss: 171.18092346191406 Low RMSE: 104.64787292480469 Med RMSE: 372.697021484375 High RMSE: 253.9989471435547 Low MAPE: 30.52621841430664 Med MAPE: 35.33026123046875 High MAPE: 12.054547309875488 Low R2: -1.4930927362429571 Med R2: -7.089386211819177 High R2: 0.7982647454442544 ------------------------------------------Fold 8------------------------------------------ Train Loss: 129.082275390625 Score Loss: 146.95050048828125 Train RMSE: 231.568359375 Score RMSE: 243.84494018554688 Train MAPE: 16.338773727416992 Score MAPE: 24.216989517211914 Train R2: 0.896980704511479 Score R2: 0.8743238298533927 3-VOLUME SCORES: LOW (n=111), MED (n=157), HIGH (n=406), TOTAL IN FOLD (n=674) Low Loss: 70.19298553466797 Med Loss: 114.89022827148438 High Loss: 180.33360290527344 Low RMSE: 144.12730407714844 Med RMSE: 212.0202178955078 High RMSE: 275.041259765625 Low MAPE: 63.80789566040039 Med MAPE: 22.57050895690918 High MAPE: 14.029571533203125 Low R2: -3.2627547032209208 Med R2: -2.1731230838566105 
High R2: 0.7612579719419048 ------------------------------------------Fold 9------------------------------------------ Train Loss: 131.6502227783203 Score Loss: 145.08148193359375 Train RMSE: 231.95997619628906 Score RMSE: 258.6912536621094 Train MAPE: 16.417850494384766 Score MAPE: 21.815244674682617 Train R2: 0.8997251688120269 Score R2: 0.8599134264113251 3-VOLUME SCORES: LOW (n=109), MED (n=176), HIGH (n=389), TOTAL IN FOLD (n=674) Low Loss: 76.66755676269531 Med Loss: 149.67120361328125 High Loss: 162.17483520507812 Low RMSE: 195.68182373046875 Med RMSE: 299.4588623046875 High RMSE: 254.26040649414062 Low MAPE: 42.43794631958008 Med MAPE: 30.51164436340332 High MAPE: 12.102025985717773 Low R2: -9.78997983867138 Med R2: -4.427694805430621 High R2: 0.786114269164181 ------------------------------------------Fold 10------------------------------------------ Train Loss: 128.51712036132812 Score Loss: 160.7220458984375 Train RMSE: 231.2304229736328 Score RMSE: 249.83401489257812 Train MAPE: 16.345561981201172 Score MAPE: 18.371545791625977 Train R2: 0.8988052301023041 Score R2: 0.8778028324313383 3-VOLUME SCORES: LOW (n=79), MED (n=160), HIGH (n=435), TOTAL IN FOLD (n=674) Low Loss: 61.03449630737305 Med Loss: 121.56182861328125 High Loss: 193.22999572753906 Low RMSE: 102.76162719726562 Med RMSE: 223.32443237304688 High RMSE: 276.4929504394531 Low MAPE: 35.1083869934082 Med MAPE: 24.009653091430664 High MAPE: 13.258193969726562 Low R2: -2.1150601709666272 Med R2: -2.26892710145397 High R2: 0.7598406880386884 Low MAE Average: 66.2896686553955 +/- 13.01649792677199 Low RMSE Average: 139.47686386108398 +/- 43.369187502467035 Low MAPE Average: 49.46067390441895 +/- 15.265014962856412 Low R2 Average: -398.58305543402906 +/- 303.98162748001045 Med MAE Average: 139.8815170288086 +/- 20.06708782009466 Med RMSE Average: 268.2738204956055 +/- 47.27063963042758 Med MAPE Average: 27.368172454833985 +/- 3.792820910834651 Med R2 Average: -363.1970425657401 +/- 154.61204194990552 
High MAE Average: 179.24573364257813 +/- 15.74076461773856 High RMSE Average: 269.9019241333008 +/- 22.7304537426223 High MAPE Average: 13.150581169128419 +/- 1.232102683829606 High R2 Average: 76.97929740954413 +/- 3.3685560601484745 Overall MAE: 153.58444213867188 +/- 10.0851354594426 Overall RMSE: 256.42994232177733 +/- 19.013763934690463 Overall MAPE: 21.865218544006346 +/- 2.28022886255349 Overall R2: 86.64868384772133 +/- 1.6744803591905228 Delimited table: MAPE RMSE MAE R2 15.59/23.84 228.13/260.93 127.81/154.79 0.90/0.87 16.43/20.40 236.45/212.70 131.29/137.12 0.90/0.90 16.00/21.07 229.74/263.18 127.71/154.29 0.90/0.87 16.63/25.98 231.63/258.08 130.94/152.30 0.89/0.86 16.42/23.34 229.58/290.11 129.74/176.16 0.90/0.84 16.47/19.83 230.93/253.60 130.03/148.53 0.90/0.86 16.45/19.79 231.06/273.33 130.30/159.89 0.90/0.85 16.34/24.22 231.57/243.84 129.08/146.95 0.90/0.87 16.42/21.82 231.96/258.69 131.65/145.08 0.90/0.86 16.35/18.37 231.23/249.83 128.52/160.72 0.90/0.88 16.31/21.87 231.23/256.43 129.71/153.58 0.90/0.87 <===Averages Wall time: 14min 7s
%%time
# USING PCA MODEL ON ALL 45 VARS: ======================================================================================
# here are the best parameters for the PCA hyperparameter search:
# lr: 0.0010009502114520618
# batch_size: 16
# n_layers: 3
# neurons_HL1: 1024
# HL0_ac_fn: relu
# HL1_ac_fn: relu
# HL2_ac_fn: relu
# Sequential(
# (0): Linear(in_features=25, out_features=1024, bias=True)
# (1): ReLU()
# (2): Linear(in_features=1024, out_features=512, bias=True)
# (3): ReLU()
# (4): Linear(in_features=512, out_features=256, bias=True)
# (5): ReLU()
# (6): Linear(in_features=256, out_features=1, bias=True)
# )
metrics = [RootMeanSquaredError(), "mean_absolute_percentage_error",
"mean_absolute_error"]
num_folds = 10
kfold = KFold(n_splits=10, shuffle=True, random_state=1)
# we define the cross validator, and other variables:
lr = 0.0010009502114520618
batch_size = 16
num_epochs = 100
num_folds = 10
kfold = KFold(n_splits=num_folds, shuffle=True, random_state=1)
fold_num = 1
MAPE_train = []
MAPE_scores = []
RMSE_train = []
RMSE_scores = []
MAE_train = []
MAE_scores = []
R2_train = []
R2_scores = []
# we convert PCA_X and Y into tensors for pytorch:
# torch_X, torch_Y = torch.from_numpy(PCA_X).float(), torch.reshape(torch.from_numpy(Y).float(), (6746, 1))
for train, test in kfold.split(X, Y):
# now our data is ready to go into our model.
model = Sequential([
Dense(1024, activation='relu', input_shape=(X.shape[1],)),
Dense(512, activation='relu'),
Dense(256, activation='relu'),
Dense(1, activation='linear')
])
model.compile(loss='mae', optimizer=Adam(learning_rate=lr), metrics=metrics)
history = model.fit(X[train], Y[train], batch_size=16, epochs=100, verbose=False)
scores = model.evaluate(X[test], Y[test], verbose=False)
train_r2 = r2_score(Y[train], model.predict(X[train]))
R2_train.append(train_r2)
score_r2 = r2_score(Y[test], model.predict(X[test]))
R2_scores.append(score_r2)
MAPE_train.append(history.history.get(
'mean_absolute_percentage_error')[-1])
MAPE_scores.append(scores[2])
RMSE_train.append(history.history.get('root_mean_squared_error')[-1])
RMSE_scores.append(scores[1])
MAE_train.append(history.history.get('loss')[-1])
MAE_scores.append(scores[0])
print(
f"------------------------------------------Fold {fold_num}------------------------------------------")
print(
f"\tTrain Loss: {history.history.get('loss')[-1]}\tScore Loss: {scores[0]}")
print(
f"\tTrain RMSE: {history.history.get('root_mean_squared_error')[-1]}\tScore RMSE: {scores[1]}")
print(
f"\tTrain MAPE: {history.history.get('mean_absolute_percentage_error')[-1]}\tScore MAPE: {scores[2]}")
if scores[2] == 100.0:
print(history.history.get('mean_absolute_percentage_error'))
print(f"\tTrain R2: {train_r2}\tScore R2: {score_r2}")
fold_num += 1
print(f"\n\nOverall MAE: {np.mean(MAE_scores)} +/- {np.std(MAE_scores)}")
print(f"Overall RMSE: {np.mean(RMSE_scores)} +/- {np.std(RMSE_scores)}")
print(f"Overall MAPE: {np.mean(MAPE_scores)} +/- {np.std(MAPE_scores)}")
print(f"Overall R2: {np.mean(R2_scores)*100} +/- {np.std(R2_scores)*100}\n\n")
print("Delimited table:")
print("MAPE\tRMSE\tMAE\tR2")
for i in range(0, 10):
print("%.2f/%.2f\t%.2f/%.2f\t%.2f/%.2f\t%.2f/%.2f" %
(MAPE_train[i], MAPE_scores[i], RMSE_train[i], RMSE_scores[i], MAE_train[i], MAE_scores[i], R2_train[i], R2_scores[i]))
print("%.2f/%.2f\t%.2f/%.2f\t%.2f/%.2f\t%.2f/%.2f\t<===Averages" % (np.mean(MAPE_train), np.mean(MAPE_scores),
np.mean(RMSE_train), np.mean(RMSE_scores), np.mean(MAE_train), np.mean(MAE_scores), np.mean(R2_train), np.mean(R2_scores)))
------------------------------------------Fold 1------------------------------------------ Train Loss: 364.1595153808594 Score Loss: 371.2342224121094 Train RMSE: 485.85076904296875 Score RMSE: 500.6285095214844 Train MAPE: 54.10270690917969 Score MAPE: 61.44056701660156 Train R2: 0.5112382796125474 Score R2: 0.5052182052108128 ------------------------------------------Fold 2------------------------------------------ Train Loss: 371.7135925292969 Score Loss: 374.3075866699219 Train RMSE: 494.5903625488281 Score RMSE: 488.31768798828125 Train MAPE: 55.92213439941406 Score MAPE: 67.00104522705078 Train R2: 0.5193315195515641 Score R2: 0.49425240425096606 ------------------------------------------Fold 3------------------------------------------ Train Loss: 370.8609924316406 Score Loss: 372.376953125 Train RMSE: 496.1138610839844 Score RMSE: 498.52752685546875 Train MAPE: 55.38139724731445 Score MAPE: 58.983604431152344 Train R2: 0.4923818974798867 Score R2: 0.5344694526622376 ------------------------------------------Fold 4------------------------------------------ Train Loss: 370.7597961425781 Score Loss: 367.1113586425781 Train RMSE: 495.39263916015625 Score RMSE: 484.9002990722656 Train MAPE: 55.43253707885742 Score MAPE: 62.93134689331055 Train R2: 0.5249266755379115 Score R2: 0.5141674297628256 ------------------------------------------Fold 5------------------------------------------ Train Loss: 373.2472839355469 Score Loss: 356.1221008300781 Train RMSE: 496.1756286621094 Score RMSE: 484.1694641113281 Train MAPE: 57.34025955200195 Score MAPE: 49.963401794433594 Train R2: 0.5169258165573869 Score R2: 0.5459477027496857 ------------------------------------------Fold 6------------------------------------------ Train Loss: 366.0114440917969 Score Loss: 381.56488037109375 Train RMSE: 489.4639892578125 Score RMSE: 501.183837890625 Train MAPE: 55.05022430419922 Score MAPE: 68.34326171875 Train R2: 0.5176570683657526 Score R2: 0.4638215456463207 
------------------------------------------Fold 7------------------------------------------ Train Loss: 370.6028137207031 Score Loss: 384.447998046875 Train RMSE: 495.41802978515625 Score RMSE: 507.0450439453125 Train MAPE: 56.20430374145508 Score MAPE: 56.094486236572266 Train R2: 0.5160674625653576 Score R2: 0.4867496423476476 ------------------------------------------Fold 8------------------------------------------ Train Loss: 372.21624755859375 Score Loss: 371.2703857421875 Train RMSE: 497.3427429199219 Score RMSE: 498.2117004394531 Train MAPE: 55.20555877685547 Score MAPE: 82.82984924316406 Train R2: 0.4771970335910821 Score R2: 0.47536983053477666 ------------------------------------------Fold 9------------------------------------------ Train Loss: 369.674072265625 Score Loss: 378.3420715332031 Train RMSE: 492.7617492675781 Score RMSE: 507.96429443359375 Train MAPE: 55.020225524902344 Score MAPE: 66.46009826660156 Train R2: 0.5039819229332074 Score R2: 0.45986870686381776 ------------------------------------------Fold 10------------------------------------------ Train Loss: 367.3312072753906 Score Loss: 387.4423828125 Train RMSE: 492.9493103027344 Score RMSE: 515.8364868164062 Train MAPE: 55.40748596191406 Score MAPE: 60.57701873779297 Train R2: 0.4832446774953265 Score R2: 0.4790665558543351 Overall MAE: 374.4219940185547 +/- 8.654827545583569 Overall RMSE: 498.67848510742186 +/- 9.841945265076923 Overall MAPE: 63.462467956542966 +/- 8.304776172590866 Overall R2: 49.589314758834256 +/- 2.744354265378543 Delimited table: MAPE RMSE MAE R2 54.10/61.44 485.85/500.63 364.16/371.23 0.51/0.51 55.92/67.00 494.59/488.32 371.71/374.31 0.52/0.49 55.38/58.98 496.11/498.53 370.86/372.38 0.49/0.53 55.43/62.93 495.39/484.90 370.76/367.11 0.52/0.51 57.34/49.96 496.18/484.17 373.25/356.12 0.52/0.55 55.05/68.34 489.46/501.18 366.01/381.56 0.52/0.46 56.20/56.09 495.42/507.05 370.60/384.45 0.52/0.49 55.21/82.83 497.34/498.21 372.22/371.27 0.48/0.48 55.02/66.46 492.76/507.96 
369.67/378.34 0.50/0.46 55.41/60.58 492.95/515.84 367.33/387.44 0.48/0.48 55.51/63.46 493.61/498.68 369.66/374.42 0.51/0.50 <===Averages Wall time: 15min 16s